Detailed changes
@@ -1,10 +1,28 @@
-Closes #ISSUE
+## Context
-Before you mark this PR as ready for review, make sure that you have:
-- [ ] Added a solid test coverage and/or screenshots from doing manual testing
-- [ ] Done a self-review taking into account security and performance aspects
-- [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist)
+<!-- What does this PR do, and why? How is it expected to impact users?
+ Not just what changed, but what motivated it and why this approach.
+
+ Link to Linear issue (e.g., ENG-123) or GitHub issue (e.g., Closes #456)
+ if one exists — helps with traceability. -->
+
+## How to Review
+
+<!-- Help reviewers focus their attention:
+ - For small PRs: note what to focus on (e.g., "error handling in foo.rs")
+ - For large PRs (>400 LOC): provide a guided tour — numbered list of
+ files/commits to read in order. (The `large-pr` label is applied automatically.)
+ - See the review process guidelines for comment conventions -->
+
+## Self-Review Checklist
+
+<!-- Check before requesting review: -->
+- [ ] I've reviewed my own diff for quality, security, and reliability
+- [ ] Unsafe blocks (if any) have justifying comments
+- [ ] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist)
+- [ ] Tests cover the new/changed behavior
+- [ ] Performance impact has been considered and is acceptable
Release Notes:
-- N/A *or* Added/Fixed/Improved ...
+- N/A or Added/Fixed/Improved ...
@@ -10,25 +10,43 @@
# AUTH NOTE: Uses a GitHub App (COORDINATOR_APP_ID + COORDINATOR_APP_PRIVATE_KEY)
# for all API operations: cloning the private coordinator repo, requesting team
# reviewers, and setting PR assignees. GITHUB_TOKEN is not used.
+#
+# SECURITY INVARIANTS (pull_request_target):
+# This workflow runs with access to secrets for ALL PRs including forks.
+# It is safe ONLY because:
+# 1. The checkout is the coordinator repo at ref: main — NEVER the PR head/branch
+# 2. No ${{ }} interpolation of event fields in run: blocks — all routed via env:
+# 3. The script never executes, sources, or reads files from the PR branch
+# Violating any of these enables remote code execution with secret access.
name: Assign Reviewers
on:
- pull_request:
+ # zizmor: ignore[dangerous-triggers] reviewed — no PR code checkout, only coordinator repo at ref: main
+ pull_request_target:
types: [opened, ready_for_review]
# GITHUB_TOKEN is not used — all operations use the GitHub App token.
# Declare minimal permissions so the default token has no write access.
permissions: {}
-# Only run for PRs from within the org (not forks) — fork PRs don't have
-# write access to request team reviewers.
+# Prevent duplicate runs for the same PR (e.g., rapid push + ready_for_review).
+concurrency:
+ group: assign-reviewers-${{ github.event.pull_request.number }}
+ cancel-in-progress: true
+
+# NOTE: For ready_for_review events, the webhook payload may still carry
+# draft: true due to a GitHub race condition (payload serialized before DB
+# update). We trust the event type instead — the script rechecks draft status
+# via a live API call as defense-in-depth.
+#
+# No author_association filter — external and fork PRs also get reviewer
+# assignments. Assigned reviewers are inherently scoped to org team members
+# by the GitHub Teams API.
jobs:
assign-reviewers:
if: >-
- github.event.pull_request.head.repo.full_name == github.repository &&
- github.event.pull_request.draft == false &&
- contains(fromJSON('["MEMBER", "OWNER"]'), github.event.pull_request.author_association)
+ github.event.action == 'ready_for_review' || github.event.pull_request.draft == false
runs-on: ubuntu-latest
steps:
- name: Generate app token
@@ -39,6 +57,8 @@ jobs:
private-key: ${{ secrets.COORDINATOR_APP_PRIVATE_KEY }}
repositories: codeowner-coordinator,zed
+ # SECURITY: checks out the coordinator repo at ref: main, NOT the PR branch.
+ # persist-credentials: false prevents the token from leaking into .git/config.
- name: Checkout coordinator repo
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4.3.1
with:
@@ -54,7 +74,9 @@ jobs:
python-version: "3.11"
- name: Install dependencies
- run: pip install pyyaml==6.0.3
+ run: |
+ pip install --no-deps -q --only-binary ':all:' \
+ -r /dev/stdin <<< "pyyaml==6.0.3 --hash=sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d"
- name: Assign reviewers
env:
@@ -69,7 +91,6 @@ jobs:
--rules-file team-membership-rules.yml \
--repo "$TARGET_REPO" \
--org zed-industries \
- --min-association member \
2>&1 | tee /tmp/assign-reviewers-output.txt
- name: Upload output
@@ -7,6 +7,8 @@ on:
- main
paths:
- extensions/**
+ - '!extensions/slash-commands-example/**'
+ - '!extensions/test-extension/**'
- '!extensions/workflows/**'
- '!extensions/*.md'
jobs:
@@ -231,7 +231,7 @@ jobs:
echo "extension_id=${EXTENSION_ID}" >> "$GITHUB_OUTPUT"
- id: extension-update
name: extension_bump::release_action
- uses: zed-extensions/update-action@72da482880c2f32ec8aa6e0a0427ab92d52ae32d
+ uses: huacnlee/zed-extension-action@82920ff0876879f65ffbcfa3403589114a8919c6
with:
extension-name: ${{ steps.get-extension-id.outputs.extension_id }}
push-to: zed-industries/extensions
@@ -269,12 +269,22 @@ jobs:
return;
}
+ // Assign staff member responsible for the bump
+ const pullNumber = parseInt(prNumber);
+
+ await github.rest.issues.addAssignees({
+ owner: 'zed-industries',
+ repo: 'extensions',
+ issue_number: pullNumber,
+ assignees: [author]
+ });
+ console.log(`Assigned ${author} to PR #${prNumber} in zed-industries/extensions`);
// Get the GraphQL node ID
const { data: pr } = await github.rest.pulls.get({
owner: 'zed-industries',
repo: 'extensions',
- pull_number: parseInt(prNumber)
+ pull_number: pullNumber
});
await github.graphql(`
@@ -0,0 +1,114 @@
+# Hotfix Review Monitor
+#
+# Runs daily and checks for merged PRs with the 'hotfix' label that have no
+# approving review within one business day of merge. (Note: any approval counts,
+# pre- or post-merge — TODO: filter reviews by submitted_at > merged_at to make
+# this strictly post-merge.) Posts a Slack summary; SOC2 compensating control.
+#
+# Security note: No untrusted input (PR titles, bodies, etc.) is interpolated
+# into shell commands. All PR metadata is read via gh API + jq, not via
+# github.event context expressions.
+#
+# Required secrets:
+# SLACK_WEBHOOK_PR_REVIEW_BOT - Incoming webhook URL for the #pr-review-ops channel
+
+name: Hotfix Review Monitor
+
+on:
+ schedule:
+ - cron: "30 13 * * 1-5" # 1:30 PM UTC weekdays
+ workflow_dispatch: {}
+
+permissions:
+ contents: read
+ pull-requests: read
+
+jobs:
+ check-hotfix-reviews:
+ if: github.repository_owner == 'zed-industries'
+ runs-on: ubuntu-latest
+ timeout-minutes: 5
+ env:
+ REPO: ${{ github.repository }}
+ steps:
+ - name: Find unreviewed hotfixes
+ id: check
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: |
+ # 80h lookback covers the Friday-to-Monday gap (72h) with buffer.
+ # Overlap on weekdays is harmless — reviewed PRs are filtered out below.
+ SINCE=$(date -u -v-80H +%Y-%m-%dT%H:%M:%SZ 2>/dev/null \
+ || date -u -d '80 hours ago' +%Y-%m-%dT%H:%M:%SZ)
+ SINCE_DATE=$(echo "$SINCE" | cut -dT -f1)
+
+ # Use the Search API to find hotfix PRs merged in the lookback window.
+ # The Pulls API with state=closed paginates through all closed PRs in
+ # the repo, which times out on large repos. The Search API supports
+ # merged:>DATE natively so GitHub does the filtering server-side.
+ gh api --paginate \
+ "search/issues?q=repo:${REPO}+is:pr+is:merged+label:hotfix+merged:>${SINCE_DATE}&per_page=100" \
+ --jq '[.items[] | {number, title, merged_at: .pull_request.merged_at}]' \
+ > /tmp/hotfix_prs.json
+
+ # Check each hotfix PR for a post-merge approving review
+ jq -r '.[].number' /tmp/hotfix_prs.json | while read -r PR_NUMBER; do
+ APPROVALS=$(gh api \
+ "repos/${REPO}/pulls/${PR_NUMBER}/reviews" \
+ --jq "[.[] | select(.state == \"APPROVED\")] | length")
+
+ if [ "$APPROVALS" -eq 0 ]; then
+ jq ".[] | select(.number == ${PR_NUMBER})" /tmp/hotfix_prs.json
+ fi
+ done | jq -s '.' > /tmp/unreviewed.json
+
+ COUNT=$(jq 'length' /tmp/unreviewed.json)
+ echo "count=$COUNT" >> "$GITHUB_OUTPUT"
+
+ - name: Notify Slack
+ if: steps.check.outputs.count != '0'
+ env:
+ SLACK_WEBHOOK_PR_REVIEW_BOT: ${{ secrets.SLACK_WEBHOOK_PR_REVIEW_BOT }}
+ COUNT: ${{ steps.check.outputs.count }}
+ run: |
+ # Build Block Kit payload from JSON — no shell interpolation of PR titles.
+ # Why jq? PR titles are attacker-controllable input. By reading them
+ # through jq -r from the JSON file and passing the result to jq --arg,
+ # the content stays safely JSON-encoded in the final payload. Block Kit
+ # doesn't change this — the same jq pipeline feeds into the blocks
+ # structure instead of plain text.
+ PRS=$(jq -r '.[] | "• <https://github.com/'"${REPO}"'/pull/\(.number)|#\(.number)> — \(.title) (merged \(.merged_at | split("T")[0]))"' /tmp/unreviewed.json)
+
+ jq -n \
+ --arg count "$COUNT" \
+ --arg prs "$PRS" \
+ '{
+ text: ($count + " hotfix PR(s) still need post-merge review"),
+ blocks: [
+ {
+ type: "section",
+ text: {
+ type: "mrkdwn",
+ text: (":rotating_light: *" + $count + " Hotfix PR(s) Need Post-Merge Review*")
+ }
+ },
+ {
+ type: "section",
+ text: { type: "mrkdwn", text: $prs }
+ },
+ { type: "divider" },
+ {
+ type: "context",
+ elements: [{
+ type: "mrkdwn",
+ text: "Hotfix PRs require review within one business day of merge."
+ }]
+ }
+ ]
+ }' | \
+ curl -s -X POST "$SLACK_WEBHOOK_PR_REVIEW_BOT" \
+ -H 'Content-Type: application/json' \
+ -d @-
+defaults:
+  run:
+    shell: bash -euxo pipefail {0}
@@ -0,0 +1,109 @@
+# PR Size Check — Compute
+#
+# Calculates PR size and saves the result as an artifact. A companion
+# workflow (pr-size-label.yml) picks up the artifact via workflow_run
+# and applies labels + comments with write permissions.
+#
+# This two-workflow split is required because fork PRs receive a
+# read-only GITHUB_TOKEN. The compute step needs no write access;
+# the label/comment step runs via workflow_run on the base repo with
+# full write permissions.
+#
+# Security note: This workflow only reads PR file data via the JS API
+# and writes a JSON artifact. No untrusted input is interpolated into
+# shell commands.
+
+name: PR Size Check
+
+on:
+ pull_request:
+ types: [opened, synchronize]
+
+permissions:
+ contents: read
+ pull-requests: read
+
+jobs:
+ compute-size:
+ if: github.repository_owner == 'zed-industries'
+ runs-on: ubuntu-latest
+ timeout-minutes: 5
+ steps:
+ - name: Calculate PR size
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ with:
+ script: |
+ const fs = require('fs');
+
+ const { data: files } = await github.rest.pulls.listFiles({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ pull_number: context.issue.number,
+ per_page: 300,
+ });
+
+ // Sum additions + deletions, excluding generated/lock files
+ const IGNORED_PATTERNS = [
+ /\.lock$/,
+ /^Cargo\.lock$/,
+ /pnpm-lock\.yaml$/,
+ /\.generated\./,
+ /\/fixtures\//,
+ /\/snapshots\//,
+ ];
+
+ let totalChanges = 0;
+ for (const file of files) {
+ const ignored = IGNORED_PATTERNS.some(p => p.test(file.filename));
+ if (!ignored) {
+ totalChanges += file.additions + file.deletions;
+ }
+ }
+
+ // Assign size bracket
+ const SIZE_BRACKETS = [
+ ['Size S', 0, 100, '0e8a16'],
+ ['Size M', 100, 400, 'fbca04'],
+ ['Size L', 400, 800, 'e99695'],
+ ['Size XL', 800, Infinity, 'b60205'],
+ ];
+
+ let sizeLabel = 'Size S';
+ let labelColor = '0e8a16';
+ for (const [label, min, max, color] of SIZE_BRACKETS) {
+ if (totalChanges >= min && totalChanges < max) {
+ sizeLabel = label;
+ labelColor = color;
+ break;
+ }
+ }
+
+ // Check if the author wrote content in the "How to Review" section.
+ const rawBody = context.payload.pull_request.body || '';
+ const howToReview = rawBody.match(/## How to Review\s*\n([\s\S]*?)(?=\n## |$)/i);
+ const hasReviewGuidance = howToReview
+ ? howToReview[1].replace(/<!--[\s\S]*?-->/g, '').trim().length > 0
+ : false;
+
+ const result = {
+ pr_number: context.issue.number,
+ total_changes: totalChanges,
+ size_label: sizeLabel,
+ label_color: labelColor,
+ has_review_guidance: hasReviewGuidance,
+ };
+
+ console.log(`PR #${result.pr_number}: ${totalChanges} LOC, ${sizeLabel}`);
+
+ fs.mkdirSync('pr-size', { recursive: true });
+ fs.writeFileSync('pr-size/result.json', JSON.stringify(result));
+
+ - name: Upload size result
+ uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+ with:
+ name: pr-size-result
+ path: pr-size/
+ retention-days: 1
+defaults:
+  run:
+    shell: bash -euxo pipefail {0}
@@ -0,0 +1,195 @@
+# PR Size Check — Label & Comment
+#
+# Triggered by workflow_run after pr-size-check.yml completes.
+# Downloads the size result artifact and applies labels + comments.
+#
+# This runs on the base repo with full GITHUB_TOKEN write access,
+# so it works for both same-repo and fork PRs.
+#
+# Security note: The artifact is treated as untrusted data — only structured
+# JSON fields are read; nothing from it is executed or interpolated into shell.
+# NOTE(review): pr_number itself is attacker-influenceable (fork PRs control the
+# compute workflow file under `pull_request`) — consider cross-checking it
+# against `workflow_run.pull_requests` before labeling/commenting.
+
+name: PR Size Label
+
+on:
+ workflow_run:
+ workflows: ["PR Size Check"]
+ types: [completed]
+
+jobs:
+ apply-labels:
+ if: >
+ github.repository_owner == 'zed-industries' &&
+ github.event.workflow_run.conclusion == 'success'
+ permissions:
+ contents: read
+ pull-requests: write
+ issues: write
+ runs-on: ubuntu-latest
+ timeout-minutes: 5
+ steps:
+ - name: Download size result artifact
+ id: download
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ with:
+ script: |
+ const fs = require('fs');
+ const path = require('path');
+
+ const allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ run_id: context.payload.workflow_run.id,
+ });
+
+ const match = allArtifacts.data.artifacts.find(a => a.name === 'pr-size-result');
+ if (!match) {
+ console.log('No pr-size-result artifact found, skipping');
+ core.setOutput('found', 'false');
+ return;
+ }
+
+ const download = await github.rest.actions.downloadArtifact({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ artifact_id: match.id,
+ archive_format: 'zip',
+ });
+
+ const temp = path.join(process.env.RUNNER_TEMP, 'pr-size');
+ fs.mkdirSync(temp, { recursive: true });
+ fs.writeFileSync(path.join(temp, 'result.zip'), Buffer.from(download.data));
+ core.setOutput('found', 'true');
+
+ - name: Unzip artifact
+ if: steps.download.outputs.found == 'true'
+ env:
+ ARTIFACT_DIR: ${{ runner.temp }}/pr-size
+ run: unzip "$ARTIFACT_DIR/result.zip" -d "$ARTIFACT_DIR"
+
+ - name: Apply labels and comment
+ if: steps.download.outputs.found == 'true'
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ with:
+ script: |
+ const fs = require('fs');
+ const path = require('path');
+
+ const temp = path.join(process.env.RUNNER_TEMP, 'pr-size');
+ const resultPath = path.join(temp, 'result.json');
+ if (!fs.existsSync(resultPath)) {
+ console.log('No result.json found, skipping');
+ return;
+ }
+
+ const result = JSON.parse(fs.readFileSync(resultPath, 'utf8'));
+
+ // Validate artifact data (treat as untrusted)
+ const prNumber = Number(result.pr_number);
+ const totalChanges = Number(result.total_changes);
+ const sizeLabel = String(result.size_label);
+ const labelColor = String(result.label_color);
+ const hasReviewGuidance = Boolean(result.has_review_guidance);
+
+ if (!prNumber || !sizeLabel.startsWith('Size ')) {
+ core.setFailed(`Invalid artifact data: pr=${prNumber}, label=${sizeLabel}`);
+ return;
+ }
+
+ console.log(`PR #${prNumber}: ${totalChanges} LOC, ${sizeLabel}`);
+
+ // --- Size label (idempotent) ---
+ const existingLabels = (await github.rest.issues.listLabelsOnIssue({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ issue_number: prNumber,
+ })).data.map(l => l.name);
+
+ const existingSizeLabels = existingLabels.filter(l => l.startsWith('Size '));
+ const alreadyCorrect = existingSizeLabels.length === 1 && existingSizeLabels[0] === sizeLabel;
+
+ if (!alreadyCorrect) {
+ for (const label of existingSizeLabels) {
+ await github.rest.issues.removeLabel({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ issue_number: prNumber,
+ name: label,
+ });
+ }
+
+ try {
+ await github.rest.issues.createLabel({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ name: sizeLabel,
+ color: labelColor,
+ });
+ } catch (e) {
+ if (e.status !== 422) throw e;
+ }
+
+ await github.rest.issues.addLabels({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ issue_number: prNumber,
+ labels: [sizeLabel],
+ });
+ }
+
+ // --- Large PR handling (400+ LOC) ---
+ if (totalChanges >= 400) {
+ if (!existingLabels.includes('large-pr')) {
+ try {
+ await github.rest.issues.createLabel({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ name: 'large-pr',
+ color: 'e99695',
+ });
+ } catch (e) {
+ if (e.status !== 422) throw e;
+ }
+
+ await github.rest.issues.addLabels({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ issue_number: prNumber,
+ labels: ['large-pr'],
+ });
+ }
+
+ // Comment once with guidance
+ const MARKER = '<!-- pr-size-check -->';
+ const { data: comments } = await github.rest.issues.listComments({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ issue_number: prNumber,
+ });
+
+ const alreadyCommented = comments.some(c => c.body.includes(MARKER));
+ if (!alreadyCommented) {
+ let body = `${MARKER}\n`;
+ body += `### :straight_ruler: PR Size: **${totalChanges} lines changed** (${sizeLabel})\n\n`;
+ body += `Please note: this PR exceeds the 400 LOC soft limit.\n`;
+ body += `- Consider **splitting** into separate PRs if the changes are separable\n`;
+ body += `- Ensure the PR description includes a **guided tour** in the "How to Review" section so reviewers know where to start\n`;
+
+ if (hasReviewGuidance) {
+ body += `\n:white_check_mark: "How to Review" section appears to include guidance — thank you!\n`;
+ }
+
+ await github.rest.issues.createComment({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ issue_number: prNumber,
+ body: body,
+ });
+ }
+ }
+
+ console.log(`PR #${prNumber}: labeled ${sizeLabel}, done`);
+defaults:
+  run:
+    shell: bash -euxo pipefail {0}
@@ -58,8 +58,10 @@ jobs:
'iksuddle',
'ishaksebsib',
'lingyaochu',
+ 'loadingalias',
'marcocondrache',
'mchisolm0',
+ 'mostlyKIGuess',
'nairadithya',
'nihalxkumar',
'notJoon',
@@ -264,6 +264,39 @@ jobs:
- name: steps::show_sccache_stats
run: sccache --show-stats || true
timeout-minutes: 60
+ clippy_mac_x86_64:
+ needs:
+ - orchestrate
+ if: needs.orchestrate.outputs.run_tests == 'true' && github.event_name != 'merge_group'
+ runs-on: namespace-profile-mac-large
+ steps:
+ - name: steps::checkout_repo
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+ with:
+ clean: false
+ - name: steps::setup_cargo_config
+ run: |
+ mkdir -p ./../.cargo
+ cp ./.cargo/ci-config.toml ./../.cargo/config.toml
+ - name: steps::cache_rust_dependencies_namespace
+ uses: namespacelabs/nscloud-cache-action@v1
+ with:
+ cache: rust
+ path: ~/.rustup
+ - name: steps::install_rustup_target
+ run: rustup target add x86_64-apple-darwin
+ - name: steps::setup_sccache
+ run: ./script/setup-sccache
+ env:
+ R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
+ R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
+ R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
+ SCCACHE_BUCKET: sccache-zed
+ - name: steps::clippy
+ run: ./script/clippy --target x86_64-apple-darwin
+ - name: steps::show_sccache_stats
+ run: sccache --show-stats || true
+ timeout-minutes: 60
run_tests_windows:
needs:
- orchestrate
@@ -732,6 +765,7 @@ jobs:
- clippy_windows
- clippy_linux
- clippy_mac
+ - clippy_mac_x86_64
- run_tests_windows
- run_tests_linux
- run_tests_mac
@@ -761,6 +795,7 @@ jobs:
check_result "clippy_windows" "$RESULT_CLIPPY_WINDOWS"
check_result "clippy_linux" "$RESULT_CLIPPY_LINUX"
check_result "clippy_mac" "$RESULT_CLIPPY_MAC"
+ check_result "clippy_mac_x86_64" "$RESULT_CLIPPY_MAC_X86_64"
check_result "run_tests_windows" "$RESULT_RUN_TESTS_WINDOWS"
check_result "run_tests_linux" "$RESULT_RUN_TESTS_LINUX"
check_result "run_tests_mac" "$RESULT_RUN_TESTS_MAC"
@@ -780,6 +815,7 @@ jobs:
RESULT_CLIPPY_WINDOWS: ${{ needs.clippy_windows.result }}
RESULT_CLIPPY_LINUX: ${{ needs.clippy_linux.result }}
RESULT_CLIPPY_MAC: ${{ needs.clippy_mac.result }}
+ RESULT_CLIPPY_MAC_X86_64: ${{ needs.clippy_mac_x86_64.result }}
RESULT_RUN_TESTS_WINDOWS: ${{ needs.run_tests_windows.result }}
RESULT_RUN_TESTS_LINUX: ${{ needs.run_tests_linux.result }}
RESULT_RUN_TESTS_MAC: ${{ needs.run_tests_mac.result }}
@@ -0,0 +1,115 @@
+# Stale PR Review Reminder
+#
+# Runs each weekday at 2 PM UTC and posts a Slack summary of open PRs that have
+# been awaiting review for more than 72 hours. (A second 8 PM UTC run is
+# commented out until after the initial rollout.) Team-level signal only — no shaming.
+#
+# Security note: No untrusted input is interpolated into shell commands.
+# All PR metadata is read via gh API + jq.
+#
+# Required secrets:
+# SLACK_WEBHOOK_PR_REVIEW_BOT - Incoming webhook URL for the #pr-review-ops channel
+
+name: Stale PR Review Reminder
+
+on:
+ schedule:
+ - cron: "0 14 * * 1-5" # 2 PM UTC weekdays
+ # - cron: "0 20 * * 1-5" # 8 PM UTC weekdays — enable after initial rollout
+ workflow_dispatch: {}
+
+permissions:
+ contents: read
+ pull-requests: read
+
+jobs:
+ check-stale-prs:
+ if: github.repository_owner == 'zed-industries'
+ runs-on: ubuntu-latest
+ timeout-minutes: 5
+ env:
+ REPO: ${{ github.repository }}
+ # Only surface PRs created on or after this date. Update this if the
+ # review process enforcement date changes.
+ PROCESS_START_DATE: "2026-03-19T00:00:00Z"
+ steps:
+ - name: Find PRs awaiting review longer than 72h
+ id: stale
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: |
+ CUTOFF=$(date -u -v-72H +%Y-%m-%dT%H:%M:%SZ 2>/dev/null \
+ || date -u -d '72 hours ago' +%Y-%m-%dT%H:%M:%SZ)
+
+ # Get open, non-draft PRs with pending review requests, created before cutoff
+ # but after the review process start date (to exclude pre-existing backlog)
+ gh api --paginate \
+ "repos/${REPO}/pulls?state=open&sort=updated&direction=asc&per_page=100" \
+ --jq "[
+ .[] |
+ select(.draft == false) |
+ select(.created_at > \"$PROCESS_START_DATE\") |
+ select(.created_at < \"$CUTOFF\") |
+ select((.requested_reviewers | length > 0) or (.requested_teams | length > 0))
+ ]" > /tmp/candidates.json
+
+ # Filter to PRs with zero approving reviews
+ jq -r '.[].number' /tmp/candidates.json | while read -r PR_NUMBER; do
+ APPROVALS=$(gh api \
+ "repos/${REPO}/pulls/${PR_NUMBER}/reviews" \
+ --jq "[.[] | select(.state == \"APPROVED\")] | length" 2>/dev/null || echo "0")
+
+ if [ "$APPROVALS" -eq 0 ]; then
+ jq ".[] | select(.number == ${PR_NUMBER}) | {number, title, author: .user.login, created_at}" \
+ /tmp/candidates.json
+ fi
+ done | jq -s '.' > /tmp/awaiting.json
+
+ COUNT=$(jq 'length' /tmp/awaiting.json)
+ echo "count=$COUNT" >> "$GITHUB_OUTPUT"
+
+ - name: Notify Slack
+ if: steps.stale.outputs.count != '0'
+ env:
+ SLACK_WEBHOOK_PR_REVIEW_BOT: ${{ secrets.SLACK_WEBHOOK_PR_REVIEW_BOT }}
+ COUNT: ${{ steps.stale.outputs.count }}
+ run: |
+ # Build Block Kit payload from JSON — no shell interpolation of PR titles.
+ # Why jq? PR titles are attacker-controllable input. By reading them
+ # through jq -r from the JSON file and passing the result to jq --arg,
+ # the content stays safely JSON-encoded in the final payload.
+ PRS=$(jq -r '.[] | "• <https://github.com/'"${REPO}"'/pull/\(.number)|#\(.number)> — \(.title) (by \(.author), opened \(.created_at | split("T")[0]))"' /tmp/awaiting.json)
+
+ jq -n \
+ --arg count "$COUNT" \
+ --arg prs "$PRS" \
+ '{
+ text: ($count + " PR(s) awaiting review for >72 hours"),
+ blocks: [
+ {
+ type: "section",
+ text: {
+ type: "mrkdwn",
+ text: (":hourglass_flowing_sand: *" + $count + " PR(s) Awaiting Review >72 Hours*")
+ }
+ },
+ {
+ type: "section",
+ text: { type: "mrkdwn", text: $prs }
+ },
+ { type: "divider" },
+ {
+ type: "context",
+ elements: [{
+ type: "mrkdwn",
+ text: "PRs awaiting review are surfaced daily. Reviewers: pick one up or reassign."
+ }]
+ }
+ ]
+ }' | \
+ curl -s -X POST "$SLACK_WEBHOOK_PR_REVIEW_BOT" \
+ -H 'Content-Type: application/json' \
+ -d @-
+defaults:
+  run:
+    shell: bash -euxo pipefail {0}
@@ -271,6 +271,7 @@ dependencies = [
"collections",
"credentials_provider",
"env_logger 0.11.8",
+ "feature_flags",
"fs",
"futures 0.3.31",
"google_ai",
@@ -333,7 +334,6 @@ dependencies = [
"agent_settings",
"ai_onboarding",
"anyhow",
- "arrayvec",
"assistant_slash_command",
"assistant_slash_commands",
"assistant_text_thread",
@@ -362,6 +362,7 @@ dependencies = [
"git",
"gpui",
"gpui_tokio",
+ "heapless",
"html_to_markdown",
"http_client",
"image",
@@ -661,7 +662,6 @@ dependencies = [
"schemars",
"serde",
"serde_json",
- "settings",
"strum 0.27.2",
"thiserror 2.0.17",
]
@@ -733,9 +733,6 @@ name = "arrayvec"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
-dependencies = [
- "serde",
-]
[[package]]
name = "as-raw-xcb-connection"
@@ -2073,7 +2070,16 @@ version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3"
dependencies = [
- "bit-vec",
+ "bit-vec 0.8.0",
+]
+
+[[package]]
+name = "bit-set"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34ddef2995421ab6a5c779542c81ee77c115206f4ad9d5a8e05f4ff49716a3dd"
+dependencies = [
+ "bit-vec 0.9.1",
]
[[package]]
@@ -2082,6 +2088,12 @@ version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7"
+[[package]]
+name = "bit-vec"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b71798fca2c1fe1086445a7258a4bc81e6e49dcd24c8d0dd9a1e57395b603f51"
+
[[package]]
name = "bit_field"
version = "0.10.3"
@@ -2193,7 +2205,7 @@ version = "3.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89ec27229c38ed0eb3c0feee3d2c1d6a4379ae44f418a29a658890e062d8f365"
dependencies = [
- "darling 0.20.11",
+ "darling 0.21.3",
"ident_case",
"prettyplease",
"proc-macro2",
@@ -3163,17 +3175,6 @@ dependencies = [
"objc",
]
-[[package]]
-name = "codespan-reporting"
-version = "0.12.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fe6d2e5af09e8c8ad56c969f2157a3d4238cebc7c55f0a517728c38f7b200f81"
-dependencies = [
- "serde",
- "termcolor",
- "unicode-width",
-]
-
[[package]]
name = "codespan-reporting"
version = "0.13.0"
@@ -3319,6 +3320,7 @@ dependencies = [
"futures 0.3.31",
"fuzzy",
"gpui",
+ "livekit_client",
"log",
"menu",
"notifications",
@@ -3338,6 +3340,7 @@ dependencies = [
"ui",
"util",
"workspace",
+ "zed_actions",
]
[[package]]
@@ -4396,7 +4399,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d74b6bcf49ebbd91f1b1875b706ea46545032a14003b5557b7dfa4bbeba6766e"
dependencies = [
"cc",
- "codespan-reporting 0.13.0",
+ "codespan-reporting",
"indexmap",
"proc-macro2",
"quote",
@@ -4411,7 +4414,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94ca2ad69673c4b35585edfa379617ac364bccd0ba0adf319811ba3a74ffa48a"
dependencies = [
"clap",
- "codespan-reporting 0.13.0",
+ "codespan-reporting",
"indexmap",
"proc-macro2",
"quote",
@@ -4616,6 +4619,7 @@ dependencies = [
"anyhow",
"gpui",
"indoc",
+ "inventory",
"log",
"paths",
"release_channel",
@@ -4624,6 +4628,7 @@ dependencies = [
"sqlez_macros",
"tempfile",
"util",
+ "uuid",
"zed_env_vars",
]
@@ -5230,7 +5235,6 @@ version = "0.1.0"
dependencies = [
"ai_onboarding",
"anyhow",
- "arrayvec",
"brotli",
"buffer_diff",
"client",
@@ -5248,6 +5252,7 @@ dependencies = [
"fs",
"futures 0.3.31",
"gpui",
+ "heapless",
"indoc",
"itertools 0.14.0",
"language",
@@ -5297,6 +5302,7 @@ dependencies = [
"client",
"cloud_llm_client",
"collections",
+ "db",
"debug_adapter_extension",
"dirs 4.0.0",
"edit_prediction",
@@ -6177,7 +6183,18 @@ version = "0.16.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "998b056554fbe42e03ae0e152895cd1a7e1002aec800fdc6635d20270260c46f"
dependencies = [
- "bit-set",
+ "bit-set 0.8.0",
+ "regex-automata",
+ "regex-syntax",
+]
+
+[[package]]
+name = "fancy-regex"
+version = "0.17.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72cf461f865c862bb7dc573f643dd6a2b6842f7c30b07882b56bd148cc2761b8"
+dependencies = [
+ "bit-set 0.8.0",
"regex-automata",
"regex-syntax",
]
@@ -7492,9 +7509,9 @@ dependencies = [
[[package]]
name = "glow"
-version = "0.16.0"
+version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c5e5ea60d70410161c8bf5da3fdfeaa1c72ed2c15f8bbb9d19fe3a4fad085f08"
+checksum = "29038e1c483364cc6bb3cf78feee1816002e127c331a1eec55a4d202b9e1adb5"
dependencies = [
"js-sys",
"slotmap",
@@ -7647,7 +7664,7 @@ dependencies = [
"mach2 0.5.0",
"media",
"metal",
- "naga 28.0.0",
+ "naga 29.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus",
"objc",
"objc2",
@@ -8007,6 +8024,15 @@ dependencies = [
"smallvec",
]
+[[package]]
+name = "hash32"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "47d60b12902ba28e2730cd37e95b8c9223af2808df9e902d4df49588d1470606"
+dependencies = [
+ "byteorder",
+]
+
[[package]]
name = "hashbrown"
version = "0.12.3"
@@ -8091,6 +8117,16 @@ dependencies = [
"http 0.2.12",
]
+[[package]]
+name = "heapless"
+version = "0.9.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2af2455f757db2b292a9b1768c4b70186d443bcb3b316252d6b540aec1cd89ed"
+dependencies = [
+ "hash32",
+ "stable_deref_trait",
+]
+
[[package]]
name = "heck"
version = "0.3.3"
@@ -9152,7 +9188,7 @@ dependencies = [
"bytecount",
"data-encoding",
"email_address",
- "fancy-regex",
+ "fancy-regex 0.16.2",
"fraction",
"getrandom 0.3.4",
"idna",
@@ -9981,8 +10017,10 @@ dependencies = [
"settings",
"simplelog",
"smallvec",
+ "tokio",
"ui",
"util",
+ "webrtc-sys",
"zed-scap",
]
@@ -10236,7 +10274,6 @@ dependencies = [
"async-recursion",
"collections",
"editor",
- "fs",
"gpui",
"html5ever 0.27.0",
"language",
@@ -10748,16 +10785,16 @@ checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a"
[[package]]
name = "naga"
-version = "28.0.0"
+version = "29.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "618f667225063219ddfc61251087db8a9aec3c3f0950c916b614e403486f1135"
+checksum = "85b4372fed0bd362d646d01b6926df0e837859ccc522fed720c395e0460f29c8"
dependencies = [
"arrayvec",
- "bit-set",
+ "bit-set 0.9.1",
"bitflags 2.10.0",
"cfg-if",
"cfg_aliases 0.2.1",
- "codespan-reporting 0.12.0",
+ "codespan-reporting",
"half",
"hashbrown 0.16.1",
"hexf-parse",
@@ -10773,15 +10810,15 @@ dependencies = [
[[package]]
name = "naga"
-version = "28.0.1"
-source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4"
+version = "29.0.0"
+source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575"
dependencies = [
"arrayvec",
- "bit-set",
+ "bit-set 0.9.1",
"bitflags 2.10.0",
"cfg-if",
"cfg_aliases 0.2.1",
- "codespan-reporting 0.12.0",
+ "codespan-reporting",
"half",
"hashbrown 0.16.1",
"hexf-parse",
@@ -11311,9 +11348,9 @@ dependencies = [
[[package]]
name = "objc2-audio-toolbox"
-version = "0.3.1"
+version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "10cbe18d879e20a4aea544f8befe38bcf52255eb63d3f23eca2842f3319e4c07"
+checksum = "6948501a91121d6399b79abaa33a8aa4ea7857fe019f341b8c23ad6e81b79b08"
dependencies = [
"bitflags 2.10.0",
"libc",
@@ -11326,9 +11363,9 @@ dependencies = [
[[package]]
name = "objc2-avf-audio"
-version = "0.3.1"
+version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bfc1d11521c211a7ebe17739fc806719da41f56c6b3f949d9861b459188ce910"
+checksum = "13a380031deed8e99db00065c45937da434ca987c034e13b87e4441f9e4090be"
dependencies = [
"objc2",
"objc2-foundation",
@@ -11336,9 +11373,9 @@ dependencies = [
[[package]]
name = "objc2-core-audio"
-version = "0.3.1"
+version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ca44961e888e19313b808f23497073e3f6b3c22bb485056674c8b49f3b025c82"
+checksum = "e1eebcea8b0dbff5f7c8504f3107c68fc061a3eb44932051c8cf8a68d969c3b2"
dependencies = [
"dispatch2",
"objc2",
@@ -11378,9 +11415,9 @@ checksum = "ef25abbcd74fb2609453eb695bd2f860d389e457f67dc17cafc8b8cbc89d0c33"
[[package]]
name = "objc2-foundation"
-version = "0.3.1"
+version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "900831247d2fe1a09a683278e5384cfb8c80c79fe6b166f9d14bfdde0ea1b03c"
+checksum = "e3e0adef53c21f888deb4fa59fc59f7eb17404926ee8a6f59f5df0fd7f9f3272"
dependencies = [
"bitflags 2.10.0",
"block2",
@@ -11401,9 +11438,9 @@ dependencies = [
[[package]]
name = "objc2-metal"
-version = "0.3.1"
+version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7f246c183239540aab1782457b35ab2040d4259175bd1d0c58e46ada7b47a874"
+checksum = "a0125f776a10d00af4152d74616409f0d4a2053a6f57fa5b7d6aa2854ac04794"
dependencies = [
"bitflags 2.10.0",
"block2",
@@ -11413,6 +11450,19 @@ dependencies = [
"objc2-foundation",
]
+[[package]]
+name = "objc2-quartz-core"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96c1358452b371bf9f104e21ec536d37a650eb10f7ee379fff67d2e08d537f1f"
+dependencies = [
+ "bitflags 2.10.0",
+ "objc2",
+ "objc2-core-foundation",
+ "objc2-foundation",
+ "objc2-metal",
+]
+
[[package]]
name = "objc_exception"
version = "0.1.2"
@@ -13142,7 +13192,7 @@ dependencies = [
"dap",
"encoding_rs",
"extension",
- "fancy-regex",
+ "fancy-regex 0.17.0",
"fs",
"futures 0.3.31",
"fuzzy",
@@ -13235,6 +13285,7 @@ dependencies = [
"criterion",
"db",
"editor",
+ "feature_flags",
"file_icons",
"git",
"git_ui",
@@ -13246,6 +13297,7 @@ dependencies = [
"pretty_assertions",
"project",
"rayon",
+ "remote_connection",
"schemars",
"search",
"serde",
@@ -13332,8 +13384,8 @@ name = "proptest"
version = "1.10.0"
source = "git+https://github.com/proptest-rs/proptest?rev=3dca198a8fef1b32e3a66f1e1897c955b4dc5b5b#3dca198a8fef1b32e3a66f1e1897c955b4dc5b5b"
dependencies = [
- "bit-set",
- "bit-vec",
+ "bit-set 0.8.0",
+ "bit-vec 0.8.0",
"bitflags 2.10.0",
"num-traits",
"proptest-macro",
@@ -13962,6 +14014,18 @@ version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20675572f6f24e9e76ef639bc5552774ed45f1c30e2951e1e99c59888861c539"
+[[package]]
+name = "raw-window-metal"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "40d213455a5f1dc59214213c7330e074ddf8114c9a42411eb890c767357ce135"
+dependencies = [
+ "objc2",
+ "objc2-core-foundation",
+ "objc2-foundation",
+ "objc2-quartz-core",
+]
+
[[package]]
name = "rayon"
version = "1.11.0"
@@ -14624,10 +14688,10 @@ dependencies = [
name = "rope"
version = "0.1.0"
dependencies = [
- "arrayvec",
"criterion",
"ctor",
"gpui",
+ "heapless",
"log",
"rand 0.9.2",
"rayon",
@@ -15364,6 +15428,7 @@ dependencies = [
"language",
"lsp",
"menu",
+ "multi_buffer",
"pretty_assertions",
"project",
"serde",
@@ -15899,6 +15964,7 @@ dependencies = [
"theme",
"ui",
"util",
+ "vim_mode_setting",
"workspace",
"zed_actions",
]
@@ -16193,9 +16259,9 @@ dependencies = [
[[package]]
name = "spirv"
-version = "0.3.0+sdk-1.3.268.0"
+version = "0.4.0+sdk-1.4.341.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eda41003dc44290527a59b13432d4a0379379fa074b70174882adfbdfd917844"
+checksum = "d9571ea910ebd84c86af4b3ed27f9dbdc6ad06f17c5f96146b2b671e2976744f"
dependencies = [
"bitflags 2.10.0",
]
@@ -16686,8 +16752,8 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
name = "sum_tree"
version = "0.1.0"
dependencies = [
- "arrayvec",
"ctor",
+ "heapless",
"log",
"proptest",
"rand 0.9.2",
@@ -17605,7 +17671,7 @@ dependencies = [
"anyhow",
"base64 0.22.1",
"bstr",
- "fancy-regex",
+ "fancy-regex 0.16.2",
"lazy_static",
"regex",
"rustc-hash 1.1.0",
@@ -17692,15 +17758,14 @@ dependencies = [
[[package]]
name = "tiny_http"
-version = "0.8.2"
+version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9ce51b50006056f590c9b7c3808c3bd70f0d1101666629713866c227d6e58d39"
+checksum = "389915df6413a2e74fb181895f933386023c71110878cd0825588928e64cdc82"
dependencies = [
"ascii",
- "chrono",
"chunked_transfer",
+ "httpdate",
"log",
- "url",
]
[[package]]
@@ -17750,15 +17815,17 @@ dependencies = [
"client",
"cloud_api_types",
"db",
- "feature_flags",
"git_ui",
"gpui",
+ "icons",
+ "livekit_client",
"notifications",
"platform_title_bar",
"project",
"recent_projects",
"release_channel",
"remote",
+ "remote_connection",
"rpc",
"schemars",
"semver",
@@ -19119,6 +19186,7 @@ dependencies = [
name = "vim_mode_setting"
version = "0.1.0"
dependencies = [
+ "gpui",
"settings",
]
@@ -20043,8 +20111,8 @@ checksum = "a751b3277700db47d3e574514de2eced5e54dc8a5436a3bf7a0b248b2cee16f3"
[[package]]
name = "wgpu"
-version = "28.0.1"
-source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4"
+version = "29.0.0"
+source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575"
dependencies = [
"arrayvec",
"bitflags 2.10.0",
@@ -20055,7 +20123,7 @@ dependencies = [
"hashbrown 0.16.1",
"js-sys",
"log",
- "naga 28.0.1",
+ "naga 29.0.0 (git+https://github.com/zed-industries/wgpu.git?branch=v29)",
"parking_lot",
"portable-atomic",
"profiling",
@@ -20072,12 +20140,12 @@ dependencies = [
[[package]]
name = "wgpu-core"
-version = "28.0.1"
-source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4"
+version = "29.0.0"
+source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575"
dependencies = [
"arrayvec",
- "bit-set",
- "bit-vec",
+ "bit-set 0.9.1",
+ "bit-vec 0.9.1",
"bitflags 2.10.0",
"bytemuck",
"cfg_aliases 0.2.1",
@@ -20085,7 +20153,7 @@ dependencies = [
"hashbrown 0.16.1",
"indexmap",
"log",
- "naga 28.0.1",
+ "naga 29.0.0 (git+https://github.com/zed-industries/wgpu.git?branch=v29)",
"once_cell",
"parking_lot",
"portable-atomic",
@@ -20098,48 +20166,48 @@ dependencies = [
"wgpu-core-deps-emscripten",
"wgpu-core-deps-windows-linux-android",
"wgpu-hal",
+ "wgpu-naga-bridge",
"wgpu-types",
]
[[package]]
name = "wgpu-core-deps-apple"
-version = "28.0.1"
-source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4"
+version = "29.0.0"
+source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575"
dependencies = [
"wgpu-hal",
]
[[package]]
name = "wgpu-core-deps-emscripten"
-version = "28.0.1"
-source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4"
+version = "29.0.0"
+source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575"
dependencies = [
"wgpu-hal",
]
[[package]]
name = "wgpu-core-deps-windows-linux-android"
-version = "28.0.1"
-source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4"
+version = "29.0.0"
+source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575"
dependencies = [
"wgpu-hal",
]
[[package]]
name = "wgpu-hal"
-version = "28.0.1"
-source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4"
+version = "29.0.0"
+source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575"
dependencies = [
"android_system_properties",
"arrayvec",
"ash",
- "bit-set",
+ "bit-set 0.9.1",
"bitflags 2.10.0",
- "block",
+ "block2",
"bytemuck",
"cfg-if",
"cfg_aliases 0.2.1",
- "core-graphics-types 0.2.0",
"glow",
"glutin_wgl_sys",
"gpu-allocator",
@@ -20150,10 +20218,13 @@ dependencies = [
"libc",
"libloading",
"log",
- "metal",
- "naga 28.0.1",
+ "naga 29.0.0 (git+https://github.com/zed-industries/wgpu.git?branch=v29)",
"ndk-sys",
- "objc",
+ "objc2",
+ "objc2-core-foundation",
+ "objc2-foundation",
+ "objc2-metal",
+ "objc2-quartz-core",
"once_cell",
"ordered-float 4.6.0",
"parking_lot",
@@ -20162,25 +20233,38 @@ dependencies = [
"profiling",
"range-alloc",
"raw-window-handle",
+ "raw-window-metal",
"renderdoc-sys",
"smallvec",
"thiserror 2.0.17",
"wasm-bindgen",
+ "wayland-sys",
"web-sys",
+ "wgpu-naga-bridge",
"wgpu-types",
"windows 0.62.2",
"windows-core 0.62.2",
]
+[[package]]
+name = "wgpu-naga-bridge"
+version = "29.0.0"
+source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575"
+dependencies = [
+ "naga 29.0.0 (git+https://github.com/zed-industries/wgpu.git?branch=v29)",
+ "wgpu-types",
+]
+
[[package]]
name = "wgpu-types"
-version = "28.0.1"
-source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4"
+version = "29.0.0"
+source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575"
dependencies = [
"bitflags 2.10.0",
"bytemuck",
"js-sys",
"log",
+ "raw-window-handle",
"web-sys",
]
@@ -21437,6 +21521,7 @@ dependencies = [
"ui",
"util",
"uuid",
+ "vim_mode_setting",
"windows 0.61.3",
"zed_actions",
"zlog",
@@ -21853,7 +21938,7 @@ dependencies = [
[[package]]
name = "zed"
-version = "0.229.0"
+version = "0.230.0"
dependencies = [
"acp_thread",
"acp_tools",
@@ -480,7 +480,6 @@ aho-corasick = "1.1"
alacritty_terminal = { git = "https://github.com/zed-industries/alacritty", rev = "9d9640d4" }
any_vec = "0.14"
anyhow = "1.0.86"
-arrayvec = { version = "0.7.4", features = ["serde"] }
ashpd = { version = "0.13", default-features = false, features = [
"async-io",
"notification",
@@ -556,7 +555,7 @@ emojis = "0.6.1"
env_logger = "0.11"
encoding_rs = "0.8"
exec = "0.3.1"
-fancy-regex = "0.16.0"
+fancy-regex = "0.17.0"
fork = "0.4.0"
futures = "0.3"
futures-concurrency = "7.7.1"
@@ -564,6 +563,7 @@ futures-lite = "1.13"
gh-workflow = { git = "https://github.com/zed-industries/gh-workflow", rev = "37f3c0575d379c218a9c455ee67585184e40d43f" }
git2 = { version = "0.20.1", default-features = false, features = ["vendored-libgit2"] }
globset = "0.4"
+heapless = "0.9.2"
handlebars = "4.3"
heck = "0.5"
heed = { version = "0.21.0", features = ["read-txn-no-tls"] }
@@ -596,13 +596,13 @@ markup5ever_rcdom = "0.3.0"
metal = "0.33"
minidumper = "0.9"
moka = { version = "0.12.10", features = ["sync"] }
-naga = { version = "28.0", features = ["wgsl-in"] }
+naga = { version = "29.0", features = ["wgsl-in"] }
nanoid = "0.4"
nbformat = "1.2.0"
nix = "0.29"
num-format = "0.4.4"
objc = "0.2"
-objc2-foundation = { version = "=0.3.1", default-features = false, features = [
+objc2-foundation = { version = "=0.3.2", default-features = false, features = [
"NSArray",
"NSAttributedString",
"NSBundle",
@@ -717,7 +717,7 @@ time = { version = "0.3", features = [
"formatting",
"local-offset",
] }
-tiny_http = "0.8"
+tiny_http = "0.12"
tokio = { version = "1" }
tokio-socks = { version = "0.5.2", default-features = false, features = [
"futures-io",
@@ -779,7 +779,8 @@ wax = "0.7"
which = "6.0.0"
wasm-bindgen = "0.2.113"
web-time = "1.1.0"
-wgpu = { git = "https://github.com/zed-industries/wgpu", rev = "465557eccfe77c840a9b4936f1408da9503372c4" }
+webrtc-sys = "0.3.23"
+wgpu = { git = "https://github.com/zed-industries/wgpu.git", branch = "v29" }
windows-core = "0.61"
yawc = "0.2.5"
zeroize = "1.8"
@@ -849,6 +850,7 @@ windows-capture = { git = "https://github.com/zed-industries/windows-capture.git
calloop = { git = "https://github.com/zed-industries/calloop" }
livekit = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "c1209aa155cbf4543383774f884a46ae7e53ee2e" }
libwebrtc = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "c1209aa155cbf4543383774f884a46ae7e53ee2e" }
+webrtc-sys = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "c1209aa155cbf4543383774f884a46ae7e53ee2e" }
[profile.dev]
split-debuginfo = "unpacked"
@@ -0,0 +1,5 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M8 7.29524V10.6536" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M6.3208 8.97442H9.67917" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M12.8 13C13.1183 13 13.4235 12.8761 13.6486 12.6554C13.8735 12.4349 14 12.1356 14 11.8236V5.94118C14 5.62916 13.8735 5.32992 13.6486 5.10929C13.4235 4.88866 13.1183 4.76471 12.8 4.76471H8.06C7.8593 4.76664 7.66133 4.71919 7.48418 4.6267C7.30703 4.53421 7.15637 4.39964 7.046 4.2353L6.56 3.52941C6.45073 3.36675 6.30199 3.23322 6.1271 3.14082C5.95221 3.04842 5.75666 3.00004 5.558 3H3.2C2.88174 3 2.57651 3.12395 2.35148 3.34458C2.12643 3.56521 2 3.86445 2 4.17647V11.8236C2 12.1356 2.12643 12.4349 2.35148 12.6554C2.57651 12.8761 2.88174 13 3.2 13H12.8Z" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+</svg>
@@ -0,0 +1,7 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M7.99567 13.0812C8.93101 13.0812 9.68925 12.3229 9.68925 11.3876C9.68925 10.4522 8.93101 9.694 7.99567 9.694C7.06033 9.694 6.30209 10.4522 6.30209 11.3876C6.30209 12.3229 7.06033 13.0812 7.99567 13.0812Z" stroke="#A9AFBC" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M4.61023 6.30643C5.54557 6.30643 6.30381 5.54819 6.30381 4.61286C6.30381 3.67752 5.54557 2.91928 4.61023 2.91928C3.6749 2.91928 2.91666 3.67752 2.91666 4.61286C2.91666 5.54819 3.6749 6.30643 4.61023 6.30643Z" stroke="#A9AFBC" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M11.3915 6.30643C12.3268 6.30643 13.0851 5.54819 13.0851 4.61286C13.0851 3.67752 12.3268 2.91928 11.3915 2.91928C10.4561 2.91928 9.69791 3.67752 9.69791 4.61286C9.69791 5.54819 10.4561 6.30643 11.3915 6.30643Z" stroke="#A9AFBC" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M11.3889 6.306V7.43505C11.3889 7.77377 11.1631 7.99958 10.8244 7.99958H5.17912C4.8404 7.99958 4.61459 7.77377 4.61459 7.43505V6.306" stroke="#A9AFBC" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M8 8V9.69358" stroke="#A9AFBC" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+</svg>
@@ -0,0 +1,6 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M12.5 3V14" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round"/>
+<path d="M9.5 6V14" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round"/>
+<path d="M6.5 9V14" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round"/>
+<path d="M3.5 12V14" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round"/>
+</svg>
@@ -0,0 +1,6 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path opacity="0.2" d="M12.5 3V14" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round"/>
+<path opacity="0.2" d="M9.5 6V14" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round"/>
+<path d="M6.5 9V14" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round"/>
+<path d="M3.5 12V14" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round"/>
+</svg>
@@ -0,0 +1,6 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path opacity="0.2" d="M12.5 3V14" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round"/>
+<path d="M9.5 6V14" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round"/>
+<path d="M6.5 9V14" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round"/>
+<path d="M3.5 12V14" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round"/>
+</svg>
@@ -391,6 +391,14 @@
"ctrl-enter": "search::ReplaceAll",
},
},
+ {
+ "context": "BufferSearchBar && !in_replace > Editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-enter": "editor::Newline",
+ "shift-enter": "search::SelectPreviousMatch",
+ },
+ },
{
"context": "BufferSearchBar && !in_replace > Editor",
"bindings": {
@@ -424,6 +432,12 @@
"ctrl-alt-enter": "search::ReplaceAll",
},
},
+ {
+ "context": "ProjectSearchBar && !in_replace > Editor",
+ "bindings": {
+ "ctrl-enter": "editor::Newline",
+ },
+ },
{
"context": "ProjectSearchView",
"bindings": {
@@ -675,11 +689,12 @@
"use_key_equivalents": true,
"bindings": {
"ctrl-n": "agents_sidebar::NewThreadInGroup",
- "left": "agents_sidebar::CollapseSelectedEntry",
- "right": "agents_sidebar::ExpandSelectedEntry",
+ "left": "menu::SelectParent",
+ "right": "menu::SelectChild",
"enter": "menu::Confirm",
"space": "menu::Confirm",
"ctrl-f": "agents_sidebar::FocusSidebarFilter",
+ "ctrl-g": "agents_sidebar::ToggleArchive",
"shift-backspace": "agent::RemoveSelectedThread",
},
},
@@ -770,18 +785,14 @@
"bindings": {
"alt-tab": "editor::AcceptEditPrediction",
"alt-l": "editor::AcceptEditPrediction",
- "tab": "editor::AcceptEditPrediction",
"alt-k": "editor::AcceptNextWordEditPrediction",
"alt-j": "editor::AcceptNextLineEditPrediction",
},
},
{
- "context": "Editor && edit_prediction_conflict",
+ "context": "Editor && edit_prediction && edit_prediction_mode == eager",
"bindings": {
- "alt-tab": "editor::AcceptEditPrediction",
- "alt-l": "editor::AcceptEditPrediction",
- "alt-k": "editor::AcceptNextWordEditPrediction",
- "alt-j": "editor::AcceptNextLineEditPrediction",
+ "tab": "editor::AcceptEditPrediction",
},
},
{
@@ -899,6 +910,8 @@
"ctrl-alt-c": "project_panel::CopyPath",
"alt-shift-copy": "workspace::CopyRelativePath",
"alt-ctrl-shift-c": "workspace::CopyRelativePath",
+ "undo": "project_panel::Undo",
+ "ctrl-z": "project_panel::Undo",
"enter": "project_panel::Rename",
"f2": "project_panel::Rename",
"backspace": ["project_panel::Trash", { "skip_prompt": false }],
@@ -1443,8 +1456,8 @@
{
"context": "GitPicker",
"bindings": {
- "alt-1": "git_picker::ActivateBranchesTab",
- "alt-2": "git_picker::ActivateWorktreesTab",
+ "alt-1": "git_picker::ActivateWorktreesTab",
+ "alt-2": "git_picker::ActivateBranchesTab",
"alt-3": "git_picker::ActivateStashTab",
},
},
@@ -446,6 +446,13 @@
{
"context": "BufferSearchBar && !in_replace > Editor",
"use_key_equivalents": true,
+ "bindings": {
+ "ctrl-enter": "editor::Newline",
+ "shift-enter": "search::SelectPreviousMatch",
+ },
+ },
+ {
+ "context": "BufferSearchBar && !in_replace > Editor",
"bindings": {
"up": "search::PreviousHistoryQuery",
"down": "search::NextHistoryQuery",
@@ -473,7 +480,6 @@
},
{
"context": "ProjectSearchBar > Editor",
- "use_key_equivalents": true,
"bindings": {
"up": "search::PreviousHistoryQuery",
"down": "search::NextHistoryQuery",
@@ -487,6 +493,12 @@
"cmd-enter": "search::ReplaceAll",
},
},
+ {
+ "context": "ProjectSearchBar && !in_replace > Editor",
+ "bindings": {
+ "ctrl-enter": "editor::Newline",
+ },
+ },
{
"context": "ProjectSearchView",
"use_key_equivalents": true,
@@ -743,11 +755,12 @@
"use_key_equivalents": true,
"bindings": {
"cmd-n": "agents_sidebar::NewThreadInGroup",
- "left": "agents_sidebar::CollapseSelectedEntry",
- "right": "agents_sidebar::ExpandSelectedEntry",
+ "left": "menu::SelectParent",
+ "right": "menu::SelectChild",
"enter": "menu::Confirm",
"space": "menu::Confirm",
"cmd-f": "agents_sidebar::FocusSidebarFilter",
+ "cmd-g": "agents_sidebar::ToggleArchive",
"shift-backspace": "agent::RemoveSelectedThread",
},
},
@@ -834,18 +847,14 @@
"context": "Editor && edit_prediction",
"bindings": {
"alt-tab": "editor::AcceptEditPrediction",
- "tab": "editor::AcceptEditPrediction",
"ctrl-cmd-right": "editor::AcceptNextWordEditPrediction",
"ctrl-cmd-down": "editor::AcceptNextLineEditPrediction",
},
},
{
- "context": "Editor && edit_prediction_conflict",
- "use_key_equivalents": true,
+ "context": "Editor && edit_prediction && edit_prediction_mode == eager",
"bindings": {
- "alt-tab": "editor::AcceptEditPrediction",
- "ctrl-cmd-right": "editor::AcceptNextWordEditPrediction",
- "ctrl-cmd-down": "editor::AcceptNextLineEditPrediction",
+ "tab": "editor::AcceptEditPrediction",
},
},
{
@@ -960,6 +969,7 @@
"cmd-v": "project_panel::Paste",
"cmd-alt-c": "workspace::CopyPath",
"alt-cmd-shift-c": "workspace::CopyRelativePath",
+ "cmd-z": "project_panel::Undo",
"enter": "project_panel::Rename",
"f2": "project_panel::Rename",
"backspace": ["project_panel::Trash", { "skip_prompt": false }],
@@ -1521,8 +1531,8 @@
{
"context": "GitPicker",
"bindings": {
- "cmd-1": "git_picker::ActivateBranchesTab",
- "cmd-2": "git_picker::ActivateWorktreesTab",
+ "cmd-1": "git_picker::ActivateWorktreesTab",
+ "cmd-2": "git_picker::ActivateBranchesTab",
"cmd-3": "git_picker::ActivateStashTab",
},
},
@@ -398,6 +398,13 @@
{
"context": "BufferSearchBar && !in_replace > Editor",
"use_key_equivalents": true,
+ "bindings": {
+ "ctrl-enter": "editor::Newline",
+ "shift-enter": "search::SelectPreviousMatch",
+ },
+ },
+ {
+ "context": "BufferSearchBar && !in_replace > Editor",
"bindings": {
"up": "search::PreviousHistoryQuery",
"down": "search::NextHistoryQuery",
@@ -415,7 +422,6 @@
},
{
"context": "ProjectSearchBar > Editor",
- "use_key_equivalents": true,
"bindings": {
"up": "search::PreviousHistoryQuery",
"down": "search::NextHistoryQuery",
@@ -429,6 +435,12 @@
"ctrl-alt-enter": "search::ReplaceAll",
},
},
+ {
+ "context": "ProjectSearchBar && !in_replace > Editor",
+ "bindings": {
+ "ctrl-enter": "editor::Newline",
+ },
+ },
{
"context": "ProjectSearchView",
"use_key_equivalents": true,
@@ -679,11 +691,12 @@
"use_key_equivalents": true,
"bindings": {
"ctrl-n": "agents_sidebar::NewThreadInGroup",
- "left": "agents_sidebar::CollapseSelectedEntry",
- "right": "agents_sidebar::ExpandSelectedEntry",
+ "left": "menu::SelectParent",
+ "right": "menu::SelectChild",
"enter": "menu::Confirm",
"space": "menu::Confirm",
"ctrl-f": "agents_sidebar::FocusSidebarFilter",
+ "ctrl-g": "agents_sidebar::ToggleArchive",
"shift-backspace": "agent::RemoveSelectedThread",
},
},
@@ -766,19 +779,15 @@
"bindings": {
"alt-tab": "editor::AcceptEditPrediction",
"alt-l": "editor::AcceptEditPrediction",
- "tab": "editor::AcceptEditPrediction",
"alt-k": "editor::AcceptNextWordEditPrediction",
"alt-j": "editor::AcceptNextLineEditPrediction",
},
},
{
- "context": "Editor && edit_prediction_conflict",
+ "context": "Editor && edit_prediction && edit_prediction_mode == eager",
"use_key_equivalents": true,
"bindings": {
- "alt-tab": "editor::AcceptEditPrediction",
- "alt-l": "editor::AcceptEditPrediction",
- "alt-k": "editor::AcceptNextWordEditPrediction",
- "alt-j": "editor::AcceptNextLineEditPrediction",
+ "tab": "editor::AcceptEditPrediction",
},
},
{
@@ -897,6 +906,7 @@
"ctrl-v": "project_panel::Paste",
"shift-alt-c": "project_panel::CopyPath",
"ctrl-k ctrl-shift-c": "workspace::CopyRelativePath",
+ "ctrl-z": "project_panel::Undo",
"enter": "project_panel::Rename",
"f2": "project_panel::Rename",
"backspace": ["project_panel::Trash", { "skip_prompt": false }],
@@ -1436,8 +1446,8 @@
{
"context": "GitPicker",
"bindings": {
- "alt-1": "git_picker::ActivateBranchesTab",
- "alt-2": "git_picker::ActivateWorktreesTab",
+ "alt-1": "git_picker::ActivateWorktreesTab",
+ "alt-2": "git_picker::ActivateBranchesTab",
"alt-3": "git_picker::ActivateStashTab",
},
},
@@ -511,8 +511,8 @@
"g shift-u": "git::UnstageAndNext", // Zed specific
// Window mode
- "space w v": "pane::SplitDown",
- "space w s": "pane::SplitRight",
+ "space w v": "pane::SplitRight",
+ "space w s": "pane::SplitDown",
"space w h": "workspace::ActivatePaneLeft",
"space w j": "workspace::ActivatePaneDown",
"space w k": "workspace::ActivatePaneUp",
@@ -1060,7 +1060,7 @@
},
},
{
- "context": "Editor && edit_prediction",
+ "context": "Editor && edit_prediction && edit_prediction_mode == eager",
"bindings": {
// This is identical to the binding in the base keymap, but the vim bindings above to
// "vim::Tab" shadow it, so it needs to be bound again.
@@ -1073,15 +1073,7 @@
"enter": "agent::Chat",
},
},
- {
- "context": "os != macos && Editor && edit_prediction_conflict",
- "bindings": {
- // alt-l is provided as an alternative to tab/alt-tab. and will be displayed in the UI. This
- // is because alt-tab may not be available, as it is often used for window switching on Linux
- // and Windows.
- "alt-l": "editor::AcceptEditPrediction",
- },
- },
+
{
"context": "SettingsWindow > NavigationMenu && !search",
"bindings": {
@@ -1121,4 +1113,31 @@
"k": "notebook::NotebookMoveUp",
},
},
+ {
+ "context": "ThreadsSidebar && !Editor",
+ "bindings": {
+ "j": "menu::SelectNext",
+ "k": "menu::SelectPrevious",
+ "h": "menu::SelectParent",
+ "l": "menu::SelectChild",
+ "g g": "menu::SelectFirst",
+ "shift-g": "menu::SelectLast",
+ "/": "agents_sidebar::FocusSidebarFilter",
+ "z a": "editor::ToggleFold",
+ "z c": "menu::SelectParent",
+ "z o": "menu::SelectChild",
+ "z shift-m": "editor::FoldAll",
+ "z shift-r": "editor::UnfoldAll",
+ },
+ },
+ {
+ "context": "ThreadsSidebar > Editor && VimControl && vim_mode == normal",
+ "bindings": {
+ "j": "editor::MoveDown",
+ "k": "editor::MoveUp",
+ "/": "vim::SwitchToInsertMode",
+ "escape": "menu::Cancel",
+ "enter": "editor::Newline",
+ },
+ },
]
@@ -922,6 +922,10 @@
///
/// Default: false
"tree_view": false,
+ // Whether the git panel should open on startup.
+ //
+ // Default: false
+ "starts_open": false,
// Whether to show a badge on the git panel icon with the count of uncommitted changes.
//
// Default: false
@@ -1069,6 +1073,7 @@
"spawn_agent": true,
"terminal": true,
"thinking": true,
+ "update_plan": true,
"web_search": true,
},
},
@@ -1088,6 +1093,7 @@
"grep": true,
"spawn_agent": true,
"thinking": true,
+ "update_plan": true,
"web_search": true,
},
},
@@ -48,6 +48,11 @@
"show_summary": true,
// Whether to show the command line in the output of the spawned task, defaults to `true`.
"show_command": true,
+ // Which edited buffers to save before running the task:
+ // * `all` — save all edited buffers
+ // * `current` — save current buffer only
+ // * `none` — don't save any buffers
+ "save": "all",
// Represents the tags for inline runnable indicators, or spawning multiple tasks at once.
// "tags": []
},
@@ -31,6 +31,7 @@ use task::{Shell, ShellBuilder};
pub use terminal::*;
use text::Bias;
use ui::App;
+use util::markdown::MarkdownEscaped;
use util::path_list::PathList;
use util::{ResultExt, get_default_system_shell_preferring_bash, paths::PathStyle};
use uuid::Uuid;
@@ -246,6 +247,8 @@ impl ToolCall {
) -> Result<Self> {
let title = if tool_call.kind == acp::ToolKind::Execute {
tool_call.title
+ } else if tool_call.kind == acp::ToolKind::Edit {
+ MarkdownEscaped(tool_call.title.as_str()).to_string()
} else if let Some((first_line, _)) = tool_call.title.split_once("\n") {
first_line.to_owned() + "…"
} else {
@@ -334,6 +337,8 @@ impl ToolCall {
self.label.update(cx, |label, cx| {
if self.kind == acp::ToolKind::Execute {
label.replace(title, cx);
+ } else if self.kind == acp::ToolKind::Edit {
+ label.replace(MarkdownEscaped(&title).to_string(), cx)
} else if let Some((first_line, _)) = title.split_once("\n") {
label.replace(first_line.to_owned() + "…", cx);
} else {
@@ -489,6 +494,58 @@ impl From<&ResolvedLocation> for AgentLocation {
}
}
+#[derive(Debug, Clone)]
+pub enum SelectedPermissionParams {
+ Terminal { patterns: Vec<String> },
+}
+
+#[derive(Debug)]
+pub struct SelectedPermissionOutcome {
+ pub option_id: acp::PermissionOptionId,
+ pub params: Option<SelectedPermissionParams>,
+}
+
+impl SelectedPermissionOutcome {
+ pub fn new(option_id: acp::PermissionOptionId) -> Self {
+ Self {
+ option_id,
+ params: None,
+ }
+ }
+
+ pub fn params(mut self, params: Option<SelectedPermissionParams>) -> Self {
+ self.params = params;
+ self
+ }
+}
+
+impl From<acp::PermissionOptionId> for SelectedPermissionOutcome {
+ fn from(option_id: acp::PermissionOptionId) -> Self {
+ Self::new(option_id)
+ }
+}
+
+impl From<SelectedPermissionOutcome> for acp::SelectedPermissionOutcome {
+ fn from(value: SelectedPermissionOutcome) -> Self {
+ Self::new(value.option_id)
+ }
+}
+
+#[derive(Debug)]
+pub enum RequestPermissionOutcome {
+ Cancelled,
+ Selected(SelectedPermissionOutcome),
+}
+
+impl From<RequestPermissionOutcome> for acp::RequestPermissionOutcome {
+ fn from(value: RequestPermissionOutcome) -> Self {
+ match value {
+ RequestPermissionOutcome::Cancelled => Self::Cancelled,
+ RequestPermissionOutcome::Selected(outcome) => Self::Selected(outcome.into()),
+ }
+ }
+}
+
#[derive(Debug)]
pub enum ToolCallStatus {
/// The tool call hasn't started running yet, but we start showing it to
@@ -497,7 +554,7 @@ pub enum ToolCallStatus {
/// The tool call is waiting for confirmation from the user.
WaitingForConfirmation {
options: PermissionOptions,
- respond_tx: oneshot::Sender<acp::PermissionOptionId>,
+ respond_tx: oneshot::Sender<SelectedPermissionOutcome>,
},
/// The tool call is currently running.
InProgress,
@@ -1326,6 +1383,18 @@ impl AcpThread {
acp::SessionUpdate::Plan(plan) => {
self.update_plan(plan, cx);
}
+ acp::SessionUpdate::SessionInfoUpdate(info_update) => {
+ if let acp::MaybeUndefined::Value(title) = info_update.title {
+ let had_provisional = self.provisional_title.take().is_some();
+ let title: SharedString = title.into();
+ if title != self.title {
+ self.title = title;
+ cx.emit(AcpThreadEvent::TitleUpdated);
+ } else if had_provisional {
+ cx.emit(AcpThreadEvent::TitleUpdated);
+ }
+ }
+ }
acp::SessionUpdate::AvailableCommandsUpdate(acp::AvailableCommandsUpdate {
available_commands,
..
@@ -1912,7 +1981,7 @@ impl AcpThread {
tool_call: acp::ToolCallUpdate,
options: PermissionOptions,
cx: &mut Context<Self>,
- ) -> Result<Task<acp::RequestPermissionOutcome>> {
+ ) -> Result<Task<RequestPermissionOutcome>> {
let (tx, rx) = oneshot::channel();
let status = ToolCallStatus::WaitingForConfirmation {
@@ -1928,10 +1997,8 @@ impl AcpThread {
Ok(cx.spawn(async move |this, cx| {
let outcome = match rx.await {
- Ok(option) => acp::RequestPermissionOutcome::Selected(
- acp::SelectedPermissionOutcome::new(option),
- ),
- Err(oneshot::Canceled) => acp::RequestPermissionOutcome::Cancelled,
+ Ok(outcome) => RequestPermissionOutcome::Selected(outcome),
+ Err(oneshot::Canceled) => RequestPermissionOutcome::Cancelled,
};
this.update(cx, |_this, cx| {
cx.emit(AcpThreadEvent::ToolAuthorizationReceived(tool_call_id))
@@ -1944,7 +2011,7 @@ impl AcpThread {
pub fn authorize_tool_call(
&mut self,
id: acp::ToolCallId,
- option_id: acp::PermissionOptionId,
+ outcome: SelectedPermissionOutcome,
option_kind: acp::PermissionOptionKind,
cx: &mut Context<Self>,
) {
@@ -1965,7 +2032,7 @@ impl AcpThread {
let curr_status = mem::replace(&mut call.status, new_status);
if let ToolCallStatus::WaitingForConfirmation { respond_tx, .. } = curr_status {
- respond_tx.send(option_id).log_err();
+ respond_tx.send(outcome).log_err();
} else if cfg!(debug_assertions) {
panic!("tried to authorize an already authorized tool call");
}
@@ -4965,4 +5032,77 @@ mod tests {
"real title should propagate to the connection"
);
}
+
+ #[gpui::test]
+ async fn test_session_info_update_replaces_provisional_title_and_emits_event(
+ cx: &mut TestAppContext,
+ ) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ let project = Project::test(fs, [], cx).await;
+ let connection = Rc::new(FakeAgentConnection::new());
+
+ let thread = cx
+ .update(|cx| {
+ connection.clone().new_session(
+ project,
+ PathList::new(&[Path::new(path!("/test"))]),
+ cx,
+ )
+ })
+ .await
+ .unwrap();
+
+ let title_updated_events = Rc::new(RefCell::new(0usize));
+ let title_updated_events_for_subscription = title_updated_events.clone();
+ thread.update(cx, |_thread, cx| {
+ cx.subscribe(
+ &thread,
+ move |_thread, _event_thread, event: &AcpThreadEvent, _cx| {
+ if matches!(event, AcpThreadEvent::TitleUpdated) {
+ *title_updated_events_for_subscription.borrow_mut() += 1;
+ }
+ },
+ )
+ .detach();
+ });
+
+ thread.update(cx, |thread, cx| {
+ thread.set_provisional_title("Hello, can you help…".into(), cx);
+ });
+ assert_eq!(
+ *title_updated_events.borrow(),
+ 1,
+ "setting a provisional title should emit TitleUpdated"
+ );
+
+ let result = thread.update(cx, |thread, cx| {
+ thread.handle_session_update(
+ acp::SessionUpdate::SessionInfoUpdate(
+ acp::SessionInfoUpdate::new().title("Helping with Rust question"),
+ ),
+ cx,
+ )
+ });
+ result.expect("session info update should succeed");
+
+ thread.read_with(cx, |thread, _| {
+ assert_eq!(thread.title().as_ref(), "Helping with Rust question");
+ assert!(
+ !thread.has_provisional_title(),
+ "session info title update should clear provisional title"
+ );
+ });
+
+ assert_eq!(
+ *title_updated_events.borrow(),
+ 2,
+ "session info title update should emit TitleUpdated"
+ );
+ assert!(
+ connection.set_title_calls.borrow().is_empty(),
+ "session info title update should not propagate back to the connection"
+ );
+ }
}
@@ -2,12 +2,13 @@ use crate::AcpThread;
use agent_client_protocol::{self as acp};
use anyhow::Result;
use chrono::{DateTime, Utc};
-use collections::IndexMap;
+use collections::{HashMap, IndexMap};
use gpui::{Entity, SharedString, Task};
use language_model::LanguageModelProviderId;
use project::{AgentId, Project};
use serde::{Deserialize, Serialize};
use std::{any::Any, error::Error, fmt, path::PathBuf, rc::Rc, sync::Arc};
+use task::{HideStrategy, SpawnInTerminal, TaskId};
use ui::{App, IconName};
use util::path_list::PathList;
use uuid::Uuid;
@@ -21,6 +22,28 @@ impl UserMessageId {
}
}
+pub fn build_terminal_auth_task(
+ id: String,
+ label: String,
+ command: String,
+ args: Vec<String>,
+ env: HashMap<String, String>,
+) -> SpawnInTerminal {
+ SpawnInTerminal {
+ id: TaskId(id),
+ full_label: label.clone(),
+ label: label.clone(),
+ command: Some(command),
+ args,
+ command_label: label,
+ env,
+ use_new_terminal: true,
+ allow_concurrent_runs: true,
+ hide: HideStrategy::Always,
+ ..Default::default()
+ }
+}
+
pub trait AgentConnection {
fn agent_id(&self) -> AgentId;
@@ -90,6 +113,14 @@ pub trait AgentConnection {
fn auth_methods(&self) -> &[acp::AuthMethod];
+ fn terminal_auth_task(
+ &self,
+ _method: &acp::AuthMethodId,
+ _cx: &App,
+ ) -> Option<SpawnInTerminal> {
+ None
+ }
+
fn authenticate(&self, method: acp::AuthMethodId, cx: &mut App) -> Task<Result<()>>;
fn prompt(
@@ -439,6 +470,7 @@ impl AgentModelList {
pub struct PermissionOptionChoice {
pub allow: acp::PermissionOption,
pub deny: acp::PermissionOption,
+ pub sub_patterns: Vec<String>,
}
impl PermissionOptionChoice {
@@ -447,10 +479,26 @@ impl PermissionOptionChoice {
}
}
+/// Pairs a tool's permission pattern with its display name.
+///
+/// For example, a pattern of `^cargo\s+build(\s|$)` would display as `cargo
+/// build`. It's handy to keep these together rather than trying to derive
+/// one from the other.
+#[derive(Debug, Clone, PartialEq)]
+pub struct PermissionPattern {
+ pub pattern: String,
+ pub display_name: String,
+}
+
#[derive(Debug, Clone)]
pub enum PermissionOptions {
Flat(Vec<acp::PermissionOption>),
Dropdown(Vec<PermissionOptionChoice>),
+ DropdownWithPatterns {
+ choices: Vec<PermissionOptionChoice>,
+ patterns: Vec<PermissionPattern>,
+ tool_name: String,
+ },
}
impl PermissionOptions {
@@ -458,6 +506,7 @@ impl PermissionOptions {
match self {
PermissionOptions::Flat(options) => options.is_empty(),
PermissionOptions::Dropdown(options) => options.is_empty(),
+ PermissionOptions::DropdownWithPatterns { choices, .. } => choices.is_empty(),
}
}
@@ -476,6 +525,17 @@ impl PermissionOptions {
None
}
}),
+ PermissionOptions::DropdownWithPatterns { choices, .. } => {
+ choices.iter().find_map(|choice| {
+ if choice.allow.kind == kind {
+ Some(&choice.allow)
+ } else if choice.deny.kind == kind {
+ Some(&choice.deny)
+ } else {
+ None
+ }
+ })
+ }
}
}
@@ -536,11 +596,14 @@ mod test_support {
)
}
- #[derive(Clone, Default)]
+ #[derive(Clone)]
pub struct StubAgentConnection {
sessions: Arc<Mutex<HashMap<acp::SessionId, Session>>>,
permission_requests: HashMap<acp::ToolCallId, PermissionOptions>,
next_prompt_updates: Arc<Mutex<Vec<acp::SessionUpdate>>>,
+ supports_load_session: bool,
+ agent_id: AgentId,
+ telemetry_id: SharedString,
}
struct Session {
@@ -548,12 +611,21 @@ mod test_support {
response_tx: Option<oneshot::Sender<acp::StopReason>>,
}
+ impl Default for StubAgentConnection {
+ fn default() -> Self {
+ Self::new()
+ }
+ }
+
impl StubAgentConnection {
pub fn new() -> Self {
Self {
next_prompt_updates: Default::default(),
permission_requests: HashMap::default(),
sessions: Arc::default(),
+ supports_load_session: false,
+ agent_id: AgentId::new("stub"),
+ telemetry_id: "stub".into(),
}
}
@@ -569,6 +641,59 @@ mod test_support {
self
}
+ pub fn with_supports_load_session(mut self, supports_load_session: bool) -> Self {
+ self.supports_load_session = supports_load_session;
+ self
+ }
+
+ pub fn with_agent_id(mut self, agent_id: AgentId) -> Self {
+ self.agent_id = agent_id;
+ self
+ }
+
+ pub fn with_telemetry_id(mut self, telemetry_id: SharedString) -> Self {
+ self.telemetry_id = telemetry_id;
+ self
+ }
+
+ fn create_session(
+ self: Rc<Self>,
+ session_id: acp::SessionId,
+ project: Entity<Project>,
+ work_dirs: PathList,
+ title: Option<SharedString>,
+ cx: &mut gpui::App,
+ ) -> Entity<AcpThread> {
+ let action_log = cx.new(|_| ActionLog::new(project.clone()));
+ let thread_title = title.unwrap_or_else(|| SharedString::new_static("Test"));
+ let thread = cx.new(|cx| {
+ AcpThread::new(
+ None,
+ thread_title,
+ Some(work_dirs),
+ self.clone(),
+ project,
+ action_log,
+ session_id.clone(),
+ watch::Receiver::constant(
+ acp::PromptCapabilities::new()
+ .image(true)
+ .audio(true)
+ .embedded_context(true),
+ ),
+ cx,
+ )
+ });
+ self.sessions.lock().insert(
+ session_id,
+ Session {
+ thread: thread.downgrade(),
+ response_tx: None,
+ },
+ );
+ thread
+ }
+
pub fn send_update(
&self,
session_id: acp::SessionId,
@@ -606,11 +731,11 @@ mod test_support {
impl AgentConnection for StubAgentConnection {
fn agent_id(&self) -> AgentId {
- AgentId::new("stub")
+ self.agent_id.clone()
}
fn telemetry_id(&self) -> SharedString {
- "stub".into()
+ self.telemetry_id.clone()
}
fn auth_methods(&self) -> &[acp::AuthMethod] {
@@ -633,32 +758,27 @@ mod test_support {
static NEXT_SESSION_ID: AtomicUsize = AtomicUsize::new(0);
let session_id =
acp::SessionId::new(NEXT_SESSION_ID.fetch_add(1, Ordering::SeqCst).to_string());
- let action_log = cx.new(|_| ActionLog::new(project.clone()));
- let thread = cx.new(|cx| {
- AcpThread::new(
- None,
- "Test",
- Some(work_dirs),
- self.clone(),
- project,
- action_log,
- session_id.clone(),
- watch::Receiver::constant(
- acp::PromptCapabilities::new()
- .image(true)
- .audio(true)
- .embedded_context(true),
- ),
- cx,
- )
- });
- self.sessions.lock().insert(
- session_id,
- Session {
- thread: thread.downgrade(),
- response_tx: None,
- },
- );
+ let thread = self.create_session(session_id, project, work_dirs, None, cx);
+ Task::ready(Ok(thread))
+ }
+
+ fn supports_load_session(&self) -> bool {
+ self.supports_load_session
+ }
+
+ fn load_session(
+ self: Rc<Self>,
+ session_id: acp::SessionId,
+ project: Entity<Project>,
+ work_dirs: PathList,
+ title: Option<SharedString>,
+ cx: &mut App,
+ ) -> Task<Result<Entity<AcpThread>>> {
+ if !self.supports_load_session {
+ return Task::ready(Err(anyhow::Error::msg("Loading sessions is not supported")));
+ }
+
+ let thread = self.create_session(session_id, project, work_dirs, title, cx);
Task::ready(Ok(thread))
}
@@ -493,8 +493,12 @@ impl NativeAgent {
})??
.await;
this.update(cx, |this, cx| {
- if let Some(state) = this.projects.get_mut(&project_id) {
- state.project_context = cx.new(|_| project_context);
+ if let Some(state) = this.projects.get(&project_id) {
+ state
+ .project_context
+ .update(cx, |current_project_context, _cx| {
+ *current_project_context = project_context;
+ });
}
})?;
}
@@ -723,7 +727,7 @@ impl NativeAgent {
fn handle_models_updated_event(
&mut self,
_registry: Entity<LanguageModelRegistry>,
- _event: &language_model::Event,
+ event: &language_model::Event,
cx: &mut Context<Self>,
) {
self.models.refresh_list(cx);
@@ -740,7 +744,13 @@ impl NativeAgent {
thread.set_model(model, cx);
cx.notify();
}
- thread.set_summarization_model(summarization_model.clone(), cx);
+ if let Some(model) = summarization_model.clone() {
+ if thread.summarization_model().is_none()
+ || matches!(event, language_model::Event::ThreadSummaryModelChanged)
+ {
+ thread.set_summarization_model(Some(model), cx);
+ }
+ }
});
}
}
@@ -1194,12 +1204,11 @@ impl NativeAgentConnection {
thread.request_tool_call_authorization(tool_call, options, cx)
})??;
cx.background_spawn(async move {
- if let acp::RequestPermissionOutcome::Selected(
- acp::SelectedPermissionOutcome { option_id, .. },
- ) = outcome_task.await
+ if let acp_thread::RequestPermissionOutcome::Selected(outcome) =
+ outcome_task.await
{
response
- .send(option_id)
+ .send(outcome)
.map(|_| anyhow!("authorization receiver was dropped"))
.log_err();
}
@@ -1216,6 +1225,9 @@ impl NativeAgentConnection {
thread.update_tool_call(update, cx)
})??;
}
+ ThreadEvent::Plan(plan) => {
+ acp_thread.update(cx, |thread, cx| thread.update_plan(plan, cx))?;
+ }
ThreadEvent::SubagentSpawned(session_id) => {
acp_thread.update(cx, |thread, cx| {
thread.subagent_spawned(session_id, cx);
@@ -2131,10 +2143,15 @@ mod internal_tests {
.unwrap();
cx.run_until_parked();
+ let thread = agent.read_with(cx, |agent, _cx| {
+ agent.sessions.values().next().unwrap().thread.clone()
+ });
+
agent.read_with(cx, |agent, cx| {
let project_id = project.entity_id();
let state = agent.projects.get(&project_id).unwrap();
- assert_eq!(state.project_context.read(cx).worktrees, vec![])
+ assert_eq!(state.project_context.read(cx).worktrees, vec![]);
+ assert_eq!(thread.read(cx).project_context().read(cx).worktrees, vec![]);
});
let worktree = project
@@ -2145,14 +2162,16 @@ mod internal_tests {
agent.read_with(cx, |agent, cx| {
let project_id = project.entity_id();
let state = agent.projects.get(&project_id).unwrap();
+ let expected_worktrees = vec![WorktreeContext {
+ root_name: "a".into(),
+ abs_path: Path::new("/a").into(),
+ rules_file: None,
+ }];
+ assert_eq!(state.project_context.read(cx).worktrees, expected_worktrees);
assert_eq!(
- state.project_context.read(cx).worktrees,
- vec![WorktreeContext {
- root_name: "a".into(),
- abs_path: Path::new("/a").into(),
- rules_file: None
- }]
- )
+ thread.read(cx).project_context().read(cx).worktrees,
+ expected_worktrees
+ );
});
// Creating `/a/.rules` updates the project context.
@@ -2165,18 +2184,20 @@ mod internal_tests {
.read(cx)
.entry_for_path(rel_path(".rules"))
.unwrap();
+ let expected_worktrees = vec![WorktreeContext {
+ root_name: "a".into(),
+ abs_path: Path::new("/a").into(),
+ rules_file: Some(RulesFileContext {
+ path_in_worktree: rel_path(".rules").into(),
+ text: "".into(),
+ project_entry_id: rules_entry.id.to_usize(),
+ }),
+ }];
+ assert_eq!(state.project_context.read(cx).worktrees, expected_worktrees);
assert_eq!(
- state.project_context.read(cx).worktrees,
- vec![WorktreeContext {
- root_name: "a".into(),
- abs_path: Path::new("/a").into(),
- rules_file: Some(RulesFileContext {
- path_in_worktree: rel_path(".rules").into(),
- text: "".into(),
- project_entry_id: rules_entry.id.to_usize()
- })
- }]
- )
+ thread.read(cx).project_context().read(cx).worktrees,
+ expected_worktrees
+ );
});
}
@@ -2441,6 +2462,61 @@ mod internal_tests {
});
}
+ #[gpui::test]
+ async fn test_summarization_model_survives_transient_registry_clearing(
+ cx: &mut TestAppContext,
+ ) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree("/", json!({ "a": {} })).await;
+ let project = Project::test(fs.clone(), [], cx).await;
+
+ let thread_store = cx.new(|cx| ThreadStore::new(cx));
+ let agent =
+ cx.update(|cx| NativeAgent::new(thread_store, Templates::new(), None, fs.clone(), cx));
+ let connection = Rc::new(NativeAgentConnection(agent.clone()));
+
+ let acp_thread = cx
+ .update(|cx| {
+ connection.clone().new_session(
+ project.clone(),
+ PathList::new(&[Path::new("/a")]),
+ cx,
+ )
+ })
+ .await
+ .unwrap();
+ let session_id = acp_thread.read_with(cx, |thread, _| thread.session_id().clone());
+
+ let thread = agent.read_with(cx, |agent, _| {
+ agent.sessions.get(&session_id).unwrap().thread.clone()
+ });
+
+ thread.read_with(cx, |thread, _| {
+ assert!(
+ thread.summarization_model().is_some(),
+ "session should have a summarization model from the test registry"
+ );
+ });
+
+ // Simulate what happens during a provider blip:
+ // update_active_language_model_from_settings calls set_default_model(None)
+ // when it can't resolve the model, clearing all fallbacks.
+ cx.update(|cx| {
+ LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
+ registry.set_default_model(None, cx);
+ });
+ });
+ cx.run_until_parked();
+
+ thread.read_with(cx, |thread, _| {
+ assert!(
+ thread.summarization_model().is_some(),
+ "summarization model should survive a transient default model clearing"
+ );
+ });
+ }
+
#[gpui::test]
async fn test_loaded_thread_preserves_thinking_enabled(cx: &mut TestAppContext) {
init_test(cx);
@@ -7,7 +7,7 @@ use anyhow::Result;
use collections::HashSet;
use fs::Fs;
use gpui::{App, Entity, Task};
-use project::AgentId;
+use project::{AgentId, Project};
use prompt_store::PromptStore;
use settings::{LanguageModelSelection, Settings as _, update_settings_file};
@@ -37,6 +37,7 @@ impl AgentServer for NativeAgentServer {
fn connect(
&self,
_delegate: AgentServerDelegate,
+ _project: Entity<Project>,
cx: &mut App,
) -> Task<Result<Rc<dyn acp_thread::AgentConnection>>> {
log::debug!("NativeAgentServer::connect");
@@ -1,4 +1,5 @@
-use shell_command_parser::extract_terminal_command_prefix;
+use acp_thread::PermissionPattern;
+use shell_command_parser::{extract_commands, extract_terminal_command_prefix};
use std::path::{Path, PathBuf};
use url::Url;
@@ -42,12 +43,21 @@ fn extract_command_prefix(command: &str) -> Option<CommandPrefix> {
})
}
-/// Extracts a regex pattern from a terminal command based on the first token (command name).
+/// Extracts a regex pattern and display name from a terminal command.
///
/// Returns `None` for commands starting with `./`, `/`, or other path-like prefixes.
/// This is a deliberate security decision: we only allow pattern-based "always allow"
/// rules for well-known command names (like `cargo`, `npm`, `git`), not for arbitrary
/// scripts or absolute paths which could be manipulated by an attacker.
+pub fn extract_terminal_permission_pattern(command: &str) -> Option<PermissionPattern> {
+ let pattern = extract_terminal_pattern(command)?;
+ let display_name = extract_terminal_pattern_display(command)?;
+ Some(PermissionPattern {
+ pattern,
+ display_name,
+ })
+}
+
pub fn extract_terminal_pattern(command: &str) -> Option<String> {
let prefix = extract_command_prefix(command)?;
let tokens = prefix.normalized_tokens;
@@ -71,6 +81,35 @@ pub fn extract_terminal_pattern_display(command: &str) -> Option<String> {
Some(prefix.display)
}
+/// Extracts patterns for ALL commands in a pipeline, not just the first one.
+///
+/// For a command like `"cargo test 2>&1 | tail"`, this returns patterns for
+/// both `cargo test` and `tail`. Path-based commands (e.g. `./script.sh`) are
+/// filtered out, and duplicate command names are deduplicated while preserving
+/// order.
+pub fn extract_all_terminal_patterns(command: &str) -> Vec<PermissionPattern> {
+ let commands = match extract_commands(command) {
+ Some(commands) => commands,
+ None => return Vec::new(),
+ };
+
+ let mut results = Vec::new();
+
+ for cmd in &commands {
+ let Some(permission_pattern) = extract_terminal_permission_pattern(cmd) else {
+ continue;
+ };
+
+ if results.contains(&permission_pattern) {
+ continue;
+ }
+
+ results.push(permission_pattern);
+ }
+
+ results
+}
+
pub fn extract_path_pattern(path: &str) -> Option<String> {
let parent = Path::new(path).parent()?;
let parent_str = normalize_separators(parent.to_str()?);
@@ -273,6 +312,39 @@ mod tests {
);
}
+ #[test]
+ fn test_extract_all_terminal_patterns_pipeline() {
+ assert_eq!(
+ extract_all_terminal_patterns("cargo test 2>&1 | tail"),
+ vec![
+ PermissionPattern {
+ pattern: "^cargo\\s+test(\\s|$)".to_string(),
+ display_name: "cargo test".to_string(),
+ },
+ PermissionPattern {
+ pattern: "^tail\\b".to_string(),
+ display_name: "tail".to_string(),
+ },
+ ]
+ );
+ }
+
+ #[test]
+ fn test_extract_all_terminal_patterns_with_path_commands() {
+ assert_eq!(
+ extract_all_terminal_patterns("./script.sh | grep foo"),
+ vec![PermissionPattern {
+ pattern: "^grep\\s+foo(\\s|$)".to_string(),
+ display_name: "grep foo".to_string(),
+ }]
+ );
+ }
+
+ #[test]
+ fn test_extract_all_terminal_patterns_all_paths() {
+ assert_eq!(extract_all_terminal_patterns("./a.sh | /usr/bin/b"), vec![]);
+ }
+
#[test]
fn test_extract_path_pattern() {
assert_eq!(
@@ -85,6 +85,7 @@ mod tests {
let templates = Templates::new();
let rendered = template.render(&templates).unwrap();
assert!(rendered.contains("## Fixing Diagnostics"));
+ assert!(!rendered.contains("## Planning"));
assert!(rendered.contains("test-model"));
}
}
@@ -20,6 +20,34 @@ You are a highly skilled software engineer with extensive knowledge in many prog
- When running commands that may run indefinitely or for a long time (such as build scripts, tests, servers, or file watchers), specify `timeout_ms` to bound runtime. If the command times out, the user can always ask you to run it again with a longer timeout or no timeout if they're willing to wait or cancel manually.
- Avoid HTML entity escaping - use plain characters instead.
+{{#if (contains available_tools 'update_plan') }}
+## Planning
+
+- You have access to an `update_plan` tool which tracks steps and progress and renders them to the user.
+- Use it to show that you've understood the task and to make complex, ambiguous, or multi-phase work easier for the user to follow.
+- A good plan breaks the work into meaningful, logically ordered steps that are easy to verify as you go.
+- When writing a plan, prefer a short list of concise, concrete steps.
+- Keep each step focused on a real unit of work and use short 1-sentence descriptions.
+- Do not use plans for simple or single-step queries that you can just do or answer immediately.
+- Do not use plans to pad your response with filler steps or to state the obvious.
+- Do not include steps that you are not actually capable of doing.
+- After calling `update_plan`, do not repeat the full plan in your response. The UI already displays it. Instead, briefly summarize what changed and note any important context or next step.
+- Before moving on to a new phase of work, mark the previous step as completed when appropriate.
+- When work is in progress, prefer having exactly one step marked as `in_progress`.
+- You can mark multiple completed steps in a single `update_plan` call.
+- If the task changes midway through, update the plan so it reflects the new approach.
+
+Use a plan when:
+
+- The task is non-trivial and will require multiple actions over a longer horizon.
+- There are logical phases or dependencies where sequencing matters.
+- The work has ambiguity that benefits from outlining high-level goals.
+- You want intermediate checkpoints for feedback and validation.
+- The user asked you to do more than one thing in a single prompt.
+- The user asked you to use the plan tool or TODOs.
+- You discover additional steps while working and intend to complete them before yielding to the user.
+
+{{/if}}
## Searching and Reading
If you are unsure how to fulfill the user's request, gather more information with tool calls and/or clarifying questions.
@@ -146,6 +174,22 @@ Otherwise, follow debugging best practices:
2. When selecting which version of an API or package to use, choose one that is compatible with the user's dependency management file(s). If no such file exists or if the package is not present, use the latest version that is in your training data.
3. If an external API requires an API Key, be sure to point this out to the user. Adhere to best security practices (e.g. DO NOT hardcode an API key in a place where it can be exposed)
+{{#if (contains available_tools 'spawn_agent') }}
+## Multi-agent delegation
+Sub-agents can help you move faster on large tasks when you use them thoughtfully. This is most useful for:
+* Very large tasks with multiple well-defined scopes
+* Plans with multiple independent steps that can be executed in parallel
+* Independent information-gathering tasks that can be done in parallel
+* Requesting a review from another agent on your work or another agent's work
+* Getting a fresh perspective on a difficult design or debugging question
+* Running tests or config commands that can output a large amount of logs when you want a concise summary. Because you only receive the subagent's final message, ask it to include the relevant failing lines or diagnostics in its response.
+
+When you delegate work, focus on coordinating and synthesizing results instead of duplicating the same work yourself. If multiple agents might edit files, assign them disjoint write scopes.
+
+This feature must be used wisely. For simple or straightforward tasks, prefer doing the work directly instead of spawning a new agent.
+
+{{/if}}
+
## System Information
Operating System: {{os}}
@@ -841,14 +841,14 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
// Approve the first - send "allow" option_id (UI transforms "once" to "allow")
tool_call_auth_1
.response
- .send(acp::PermissionOptionId::new("allow"))
+ .send(acp::PermissionOptionId::new("allow").into())
.unwrap();
cx.run_until_parked();
// Reject the second - send "deny" option_id directly since Deny is now a button
tool_call_auth_2
.response
- .send(acp::PermissionOptionId::new("deny"))
+ .send(acp::PermissionOptionId::new("deny").into())
.unwrap();
cx.run_until_parked();
@@ -892,9 +892,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
let tool_call_auth_3 = next_tool_call_authorization(&mut events).await;
tool_call_auth_3
.response
- .send(acp::PermissionOptionId::new(
- "always_allow:tool_requiring_permission",
- ))
+ .send(acp::PermissionOptionId::new("always_allow:tool_requiring_permission").into())
.unwrap();
cx.run_until_parked();
let completion = fake_model.pending_completions().pop().unwrap();
@@ -1001,6 +999,20 @@ async fn expect_tool_call_update_fields(
}
}
+async fn expect_plan(events: &mut UnboundedReceiver<Result<ThreadEvent>>) -> acp::Plan {
+ let event = events
+ .next()
+ .await
+ .expect("no plan event received")
+ .unwrap();
+ match event {
+ ThreadEvent::Plan(plan) => plan,
+ event => {
+ panic!("Unexpected event {event:?}");
+ }
+ }
+}
+
async fn next_tool_call_authorization(
events: &mut UnboundedReceiver<Result<ThreadEvent>>,
) -> ToolCallAuthorization {
@@ -1183,32 +1195,88 @@ fn test_permission_option_ids_for_terminal() {
panic!("Expected dropdown permission options");
};
- let allow_ids: Vec<String> = choices
- .iter()
- .map(|choice| choice.allow.option_id.0.to_string())
- .collect();
- let deny_ids: Vec<String> = choices
- .iter()
- .map(|choice| choice.deny.option_id.0.to_string())
- .collect();
+ // Expect 3 choices: always-tool, always-pattern, once
+ assert_eq!(choices.len(), 3);
- assert!(allow_ids.contains(&"always_allow:terminal".to_string()));
- assert!(allow_ids.contains(&"allow".to_string()));
- assert!(
- allow_ids
- .iter()
- .any(|id| id.starts_with("always_allow_pattern:terminal\n")),
- "Missing allow pattern option"
+ // First two choices both use the tool-level option IDs
+ assert_eq!(
+ choices[0].allow.option_id.0.as_ref(),
+ "always_allow:terminal"
);
+ assert_eq!(choices[0].deny.option_id.0.as_ref(), "always_deny:terminal");
+ assert!(choices[0].sub_patterns.is_empty());
- assert!(deny_ids.contains(&"always_deny:terminal".to_string()));
- assert!(deny_ids.contains(&"deny".to_string()));
- assert!(
- deny_ids
- .iter()
- .any(|id| id.starts_with("always_deny_pattern:terminal\n")),
- "Missing deny pattern option"
+ assert_eq!(
+ choices[1].allow.option_id.0.as_ref(),
+ "always_allow:terminal"
);
+ assert_eq!(choices[1].deny.option_id.0.as_ref(), "always_deny:terminal");
+ assert_eq!(choices[1].sub_patterns, vec!["^cargo\\s+build(\\s|$)"]);
+
+ // Third choice is the one-time allow/deny
+ assert_eq!(choices[2].allow.option_id.0.as_ref(), "allow");
+ assert_eq!(choices[2].deny.option_id.0.as_ref(), "deny");
+ assert!(choices[2].sub_patterns.is_empty());
+}
+
+#[test]
+fn test_permission_options_terminal_pipeline_produces_dropdown_with_patterns() {
+ let permission_options = ToolPermissionContext::new(
+ TerminalTool::NAME,
+ vec!["cargo test 2>&1 | tail".to_string()],
+ )
+ .build_permission_options();
+
+ let PermissionOptions::DropdownWithPatterns {
+ choices,
+ patterns,
+ tool_name,
+ } = permission_options
+ else {
+ panic!("Expected DropdownWithPatterns permission options for pipeline command");
+ };
+
+ assert_eq!(tool_name, TerminalTool::NAME);
+
+ // Should have "Always for terminal" and "Only this time" choices
+ assert_eq!(choices.len(), 2);
+ let labels: Vec<&str> = choices
+ .iter()
+ .map(|choice| choice.allow.name.as_ref())
+ .collect();
+ assert!(labels.contains(&"Always for terminal"));
+ assert!(labels.contains(&"Only this time"));
+
+ // Should have per-command patterns for "cargo test" and "tail"
+ assert_eq!(patterns.len(), 2);
+ let pattern_names: Vec<&str> = patterns.iter().map(|cp| cp.display_name.as_str()).collect();
+ assert!(pattern_names.contains(&"cargo test"));
+ assert!(pattern_names.contains(&"tail"));
+
+ // Verify patterns are valid regex patterns
+ let regex_patterns: Vec<&str> = patterns.iter().map(|cp| cp.pattern.as_str()).collect();
+ assert!(regex_patterns.contains(&"^cargo\\s+test(\\s|$)"));
+ assert!(regex_patterns.contains(&"^tail\\b"));
+}
+
+#[test]
+fn test_permission_options_terminal_pipeline_with_chaining() {
+ let permission_options = ToolPermissionContext::new(
+ TerminalTool::NAME,
+ vec!["npm install && npm test | tail".to_string()],
+ )
+ .build_permission_options();
+
+ let PermissionOptions::DropdownWithPatterns { patterns, .. } = permission_options else {
+ panic!("Expected DropdownWithPatterns for chained pipeline command");
+ };
+
+ // With subcommand-aware patterns, "npm install" and "npm test" are distinct
+ assert_eq!(patterns.len(), 3);
+ let pattern_names: Vec<&str> = patterns.iter().map(|cp| cp.display_name.as_str()).collect();
+ assert!(pattern_names.contains(&"npm install"));
+ assert!(pattern_names.contains(&"npm test"));
+ assert!(pattern_names.contains(&"tail"));
}
#[gpui::test]
@@ -3362,6 +3430,122 @@ async fn test_tool_updates_to_completion(cx: &mut TestAppContext) {
);
}
+#[gpui::test]
+async fn test_update_plan_tool_updates_thread_events(cx: &mut TestAppContext) {
+ let ThreadTest { thread, model, .. } = setup(cx, TestModel::Fake).await;
+ thread.update(cx, |thread, _cx| thread.add_tool(UpdatePlanTool));
+ let fake_model = model.as_fake();
+
+ let mut events = thread
+ .update(cx, |thread, cx| {
+ thread.send(UserMessageId::new(), ["Make a plan"], cx)
+ })
+ .unwrap();
+ cx.run_until_parked();
+
+ let input = json!({
+ "plan": [
+ {
+ "step": "Inspect the code",
+ "status": "completed",
+ "priority": "high"
+ },
+ {
+ "step": "Implement the tool",
+ "status": "in_progress"
+ },
+ {
+ "step": "Run tests",
+ "status": "pending",
+ "priority": "low"
+ }
+ ]
+ });
+ fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
+ LanguageModelToolUse {
+ id: "plan_1".into(),
+ name: UpdatePlanTool::NAME.into(),
+ raw_input: input.to_string(),
+ input,
+ is_input_complete: true,
+ thought_signature: None,
+ },
+ ));
+ fake_model.end_last_completion_stream();
+ cx.run_until_parked();
+
+ let tool_call = expect_tool_call(&mut events).await;
+ assert_eq!(
+ tool_call,
+ acp::ToolCall::new("plan_1", "Update plan")
+ .kind(acp::ToolKind::Think)
+ .raw_input(json!({
+ "plan": [
+ {
+ "step": "Inspect the code",
+ "status": "completed",
+ "priority": "high"
+ },
+ {
+ "step": "Implement the tool",
+ "status": "in_progress"
+ },
+ {
+ "step": "Run tests",
+ "status": "pending",
+ "priority": "low"
+ }
+ ]
+ }))
+ .meta(acp::Meta::from_iter([(
+ "tool_name".into(),
+ "update_plan".into()
+ )]))
+ );
+
+ let update = expect_tool_call_update_fields(&mut events).await;
+ assert_eq!(
+ update,
+ acp::ToolCallUpdate::new(
+ "plan_1",
+ acp::ToolCallUpdateFields::new().status(acp::ToolCallStatus::InProgress)
+ )
+ );
+
+ let plan = expect_plan(&mut events).await;
+ assert_eq!(
+ plan,
+ acp::Plan::new(vec![
+ acp::PlanEntry::new(
+ "Inspect the code",
+ acp::PlanEntryPriority::High,
+ acp::PlanEntryStatus::Completed,
+ ),
+ acp::PlanEntry::new(
+ "Implement the tool",
+ acp::PlanEntryPriority::Medium,
+ acp::PlanEntryStatus::InProgress,
+ ),
+ acp::PlanEntry::new(
+ "Run tests",
+ acp::PlanEntryPriority::Low,
+ acp::PlanEntryStatus::Pending,
+ ),
+ ])
+ );
+
+ let update = expect_tool_call_update_fields(&mut events).await;
+ assert_eq!(
+ update,
+ acp::ToolCallUpdate::new(
+ "plan_1",
+ acp::ToolCallUpdateFields::new()
+ .status(acp::ToolCallStatus::Completed)
+ .raw_output("Plan updated")
+ )
+ );
+}
+
#[gpui::test]
async fn test_send_no_retry_on_success(cx: &mut TestAppContext) {
let ThreadTest { thread, model, .. } = setup(cx, TestModel::Fake).await;
@@ -3768,6 +3952,7 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest {
StreamingEchoTool::NAME: true,
StreamingFailingEchoTool::NAME: true,
TerminalTool::NAME: true,
+ UpdatePlanTool::NAME: true,
}
}
}
@@ -4957,48 +5142,6 @@ async fn test_subagent_tool_resume_session(cx: &mut TestAppContext) {
);
}
-#[gpui::test]
-async fn test_subagent_tool_is_present_when_feature_flag_enabled(cx: &mut TestAppContext) {
- init_test(cx);
-
- cx.update(|cx| {
- cx.update_flags(true, vec!["subagents".to_string()]);
- });
-
- let fs = FakeFs::new(cx.executor());
- fs.insert_tree(path!("/test"), json!({})).await;
- let project = Project::test(fs, [path!("/test").as_ref()], cx).await;
- let project_context = cx.new(|_cx| ProjectContext::default());
- let context_server_store = project.read_with(cx, |project, _| project.context_server_store());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(context_server_store.clone(), cx));
- let model = Arc::new(FakeLanguageModel::default());
-
- let environment = Rc::new(cx.update(|cx| {
- FakeThreadEnvironment::default().with_terminal(FakeTerminalHandle::new_never_exits(cx))
- }));
-
- let thread = cx.new(|cx| {
- let mut thread = Thread::new(
- project.clone(),
- project_context,
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- );
- thread.add_default_tools(environment, cx);
- thread
- });
-
- thread.read_with(cx, |thread, _| {
- assert!(
- thread.has_registered_tool(SpawnAgentTool::NAME),
- "subagent tool should be present when feature flag is enabled"
- );
- });
-}
-
#[gpui::test]
async fn test_subagent_thread_inherits_parent_thread_properties(cx: &mut TestAppContext) {
init_test(cx);
@@ -3,12 +3,14 @@ use crate::{
DeletePathTool, DiagnosticsTool, EditFileTool, FetchTool, FindPathTool, GrepTool,
ListDirectoryTool, MovePathTool, NowTool, OpenTool, ProjectSnapshot, ReadFileTool,
RestoreFileFromDiskTool, SaveFileTool, SpawnAgentTool, StreamingEditFileTool,
- SystemPromptTemplate, Template, Templates, TerminalTool, ToolPermissionDecision, WebSearchTool,
- decide_permission_from_settings,
+ SystemPromptTemplate, Template, Templates, TerminalTool, ToolPermissionDecision,
+ UpdatePlanTool, WebSearchTool, decide_permission_from_settings,
};
use acp_thread::{MentionUri, UserMessageId};
use action_log::ActionLog;
-use feature_flags::{FeatureFlagAppExt as _, StreamingEditFileToolFeatureFlag};
+use feature_flags::{
+ FeatureFlagAppExt as _, StreamingEditFileToolFeatureFlag, UpdatePlanToolFeatureFlag,
+};
use agent_client_protocol as acp;
use agent_settings::{
@@ -661,6 +663,7 @@ pub enum ThreadEvent {
AgentThinking(String),
ToolCall(acp::ToolCall),
ToolCallUpdate(acp_thread::ToolCallUpdate),
+ Plan(acp::Plan),
ToolCallAuthorization(ToolCallAuthorization),
SubagentSpawned(acp::SessionId),
Retry(acp_thread::RetryStatus),
@@ -758,6 +761,48 @@ impl ToolPermissionContext {
true
};
+ // For terminal invocations made up of multiple piped commands, use DropdownWithPatterns
+ // to let users individually select which command patterns to always allow.
+ if tool_name == TerminalTool::NAME && shell_supports_always_allow {
+ if let Some(input) = input_values.first() {
+ let all_patterns = extract_all_terminal_patterns(input);
+ if all_patterns.len() > 1 {
+ let mut choices = Vec::new();
+ choices.push(acp_thread::PermissionOptionChoice {
+ allow: acp::PermissionOption::new(
+ acp::PermissionOptionId::new(format!("always_allow:{}", tool_name)),
+ format!("Always for {}", tool_name.replace('_', " ")),
+ acp::PermissionOptionKind::AllowAlways,
+ ),
+ deny: acp::PermissionOption::new(
+ acp::PermissionOptionId::new(format!("always_deny:{}", tool_name)),
+ format!("Always for {}", tool_name.replace('_', " ")),
+ acp::PermissionOptionKind::RejectAlways,
+ ),
+ sub_patterns: vec![],
+ });
+ choices.push(acp_thread::PermissionOptionChoice {
+ allow: acp::PermissionOption::new(
+ acp::PermissionOptionId::new("allow"),
+ "Only this time",
+ acp::PermissionOptionKind::AllowOnce,
+ ),
+ deny: acp::PermissionOption::new(
+ acp::PermissionOptionId::new("deny"),
+ "Only this time",
+ acp::PermissionOptionKind::RejectOnce,
+ ),
+ sub_patterns: vec![],
+ });
+ return acp_thread::PermissionOptions::DropdownWithPatterns {
+ choices,
+ patterns: all_patterns,
+ tool_name: tool_name.clone(),
+ };
+ }
+ }
+ }
+
let extract_for_value = |value: &str| -> (Option<String>, Option<String>) {
if tool_name == TerminalTool::NAME {
(
@@ -806,20 +851,22 @@ impl ToolPermissionContext {
let mut choices = Vec::new();
- let mut push_choice = |label: String, allow_id, deny_id, allow_kind, deny_kind| {
- choices.push(acp_thread::PermissionOptionChoice {
- allow: acp::PermissionOption::new(
- acp::PermissionOptionId::new(allow_id),
- label.clone(),
- allow_kind,
- ),
- deny: acp::PermissionOption::new(
- acp::PermissionOptionId::new(deny_id),
- label,
- deny_kind,
- ),
- });
- };
+ let mut push_choice =
+ |label: String, allow_id, deny_id, allow_kind, deny_kind, sub_patterns: Vec<String>| {
+ choices.push(acp_thread::PermissionOptionChoice {
+ allow: acp::PermissionOption::new(
+ acp::PermissionOptionId::new(allow_id),
+ label.clone(),
+ allow_kind,
+ ),
+ deny: acp::PermissionOption::new(
+ acp::PermissionOptionId::new(deny_id),
+ label,
+ deny_kind,
+ ),
+ sub_patterns,
+ });
+ };
if shell_supports_always_allow {
push_choice(
@@ -828,6 +875,7 @@ impl ToolPermissionContext {
format!("always_deny:{}", tool_name),
acp::PermissionOptionKind::AllowAlways,
acp::PermissionOptionKind::RejectAlways,
+ vec![],
);
if let (Some(pattern), Some(display)) = (pattern, pattern_display) {
@@ -838,10 +886,11 @@ impl ToolPermissionContext {
};
push_choice(
button_text,
- format!("always_allow_pattern:{}\n{}", tool_name, pattern),
- format!("always_deny_pattern:{}\n{}", tool_name, pattern),
+ format!("always_allow:{}", tool_name),
+ format!("always_deny:{}", tool_name),
acp::PermissionOptionKind::AllowAlways,
acp::PermissionOptionKind::RejectAlways,
+ vec![pattern],
);
}
}
@@ -852,6 +901,7 @@ impl ToolPermissionContext {
"deny".to_string(),
acp::PermissionOptionKind::AllowOnce,
acp::PermissionOptionKind::RejectOnce,
+ vec![],
);
acp_thread::PermissionOptions::Dropdown(choices)
@@ -862,7 +912,7 @@ impl ToolPermissionContext {
pub struct ToolCallAuthorization {
pub tool_call: acp::ToolCallUpdate,
pub options: acp_thread::PermissionOptions,
- pub response: oneshot::Sender<acp::PermissionOptionId>,
+ pub response: oneshot::Sender<acp_thread::SelectedPermissionOutcome>,
pub context: Option<ToolPermissionContext>,
}
@@ -1482,6 +1532,9 @@ impl Thread {
self.add_tool(MovePathTool::new(self.project.clone()));
self.add_tool(NowTool);
self.add_tool(OpenTool::new(self.project.clone()));
+ if cx.has_flag::<UpdatePlanToolFeatureFlag>() {
+ self.add_tool(UpdatePlanTool);
+ }
self.add_tool(ReadFileTool::new(
self.project.clone(),
self.action_log.clone(),
@@ -3429,6 +3482,10 @@ impl ThreadEventStream {
.ok();
}
+ fn send_plan(&self, plan: acp::Plan) {
+ self.0.unbounded_send(Ok(ThreadEvent::Plan(plan))).ok();
+ }
+
fn send_retry(&self, status: acp_thread::RetryStatus) {
self.0.unbounded_send(Ok(ThreadEvent::Retry(status))).ok();
}
@@ -3564,6 +3621,10 @@ impl ToolCallEventStream {
.ok();
}
+ pub fn update_plan(&self, plan: acp::Plan) {
+ self.stream.send_plan(plan);
+ }
+
/// Authorize a third-party tool (e.g., MCP tool from a context server).
///
/// Unlike built-in tools, third-party tools don't support pattern-based permissions.
@@ -3617,6 +3678,7 @@ impl ToolCallEventStream {
format!("Always for {} MCP tool", display_name),
acp::PermissionOptionKind::RejectAlways,
),
+ sub_patterns: vec![],
},
acp_thread::PermissionOptionChoice {
allow: acp::PermissionOption::new(
@@ -3629,6 +3691,7 @@ impl ToolCallEventStream {
"Only this time",
acp::PermissionOptionKind::RejectOnce,
),
+ sub_patterns: vec![],
},
]),
response: response_tx,
@@ -3644,40 +3707,13 @@ impl ToolCallEventStream {
let fs = self.fs.clone();
cx.spawn(async move |cx| {
- let response_str = response_rx.await?.0.to_string();
-
- if response_str == format!("always_allow_mcp:{}", tool_id) {
- if let Some(fs) = fs.clone() {
- cx.update(|cx| {
- update_settings_file(fs, cx, move |settings, _| {
- settings
- .agent
- .get_or_insert_default()
- .set_tool_default_permission(&tool_id, ToolPermissionMode::Allow);
- });
- });
- }
- return Ok(());
- }
- if response_str == format!("always_deny_mcp:{}", tool_id) {
- if let Some(fs) = fs.clone() {
- cx.update(|cx| {
- update_settings_file(fs, cx, move |settings, _| {
- settings
- .agent
- .get_or_insert_default()
- .set_tool_default_permission(&tool_id, ToolPermissionMode::Deny);
- });
- });
- }
- return Err(anyhow!("Permission to run tool denied by user"));
- }
-
- if response_str == "allow" {
- return Ok(());
+ let outcome = response_rx.await?;
+ let is_allow = Self::persist_permission_outcome(&outcome, fs, &cx);
+ if is_allow {
+ Ok(())
+ } else {
+ Err(anyhow!("Permission to run tool denied by user"))
}
-
- Err(anyhow!("Permission to run tool denied by user"))
})
}
@@ -3687,8 +3723,6 @@ impl ToolCallEventStream {
context: ToolPermissionContext,
cx: &mut App,
) -> Task<Result<()>> {
- use settings::ToolPermissionMode;
-
let options = context.build_permission_options();
let (response_tx, response_rx) = oneshot::channel();
@@ -3715,90 +3749,118 @@ impl ToolCallEventStream {
let fs = self.fs.clone();
cx.spawn(async move |cx| {
- let response_str = response_rx.await?.0.to_string();
-
- // Handle "always allow tool" - e.g., "always_allow:terminal"
- if let Some(tool) = response_str.strip_prefix("always_allow:") {
- if let Some(fs) = fs.clone() {
- let tool = tool.to_string();
- cx.update(|cx| {
- update_settings_file(fs, cx, move |settings, _| {
- settings
- .agent
- .get_or_insert_default()
- .set_tool_default_permission(&tool, ToolPermissionMode::Allow);
- });
- });
- }
- return Ok(());
+ let outcome = response_rx.await?;
+ let is_allow = Self::persist_permission_outcome(&outcome, fs, &cx);
+ if is_allow {
+ Ok(())
+ } else {
+ Err(anyhow!("Permission to run tool denied by user"))
}
+ })
+ }
- // Handle "always deny tool" - e.g., "always_deny:terminal"
- if let Some(tool) = response_str.strip_prefix("always_deny:") {
- if let Some(fs) = fs.clone() {
- let tool = tool.to_string();
- cx.update(|cx| {
- update_settings_file(fs, cx, move |settings, _| {
- settings
- .agent
- .get_or_insert_default()
- .set_tool_default_permission(&tool, ToolPermissionMode::Deny);
- });
- });
- }
- return Err(anyhow!("Permission to run tool denied by user"));
- }
+ /// Interprets a `SelectedPermissionOutcome` and persists any settings changes.
+ /// Returns `true` if the tool call should be allowed, `false` if denied.
+ fn persist_permission_outcome(
+ outcome: &acp_thread::SelectedPermissionOutcome,
+ fs: Option<Arc<dyn Fs>>,
+ cx: &AsyncApp,
+ ) -> bool {
+ let option_id = outcome.option_id.0.as_ref();
+
+ let always_permission = option_id
+ .strip_prefix("always_allow:")
+ .map(|tool| (tool, ToolPermissionMode::Allow))
+ .or_else(|| {
+ option_id
+ .strip_prefix("always_deny:")
+ .map(|tool| (tool, ToolPermissionMode::Deny))
+ })
+ .or_else(|| {
+ option_id
+ .strip_prefix("always_allow_mcp:")
+ .map(|tool| (tool, ToolPermissionMode::Allow))
+ })
+ .or_else(|| {
+ option_id
+ .strip_prefix("always_deny_mcp:")
+ .map(|tool| (tool, ToolPermissionMode::Deny))
+ });
- // Handle "always allow pattern" - e.g., "always_allow_pattern:mcp:server:tool\n^cargo\s"
- if let Some(rest) = response_str.strip_prefix("always_allow_pattern:") {
- if let Some((pattern_tool_name, pattern)) = rest.split_once('\n') {
- let pattern_tool_name = pattern_tool_name.to_string();
- let pattern = pattern.to_string();
- if let Some(fs) = fs.clone() {
- cx.update(|cx| {
- update_settings_file(fs, cx, move |settings, _| {
- settings
- .agent
- .get_or_insert_default()
- .add_tool_allow_pattern(&pattern_tool_name, pattern);
- });
- });
- }
- } else {
- log::error!("Failed to parse always allow pattern: missing newline separator in '{rest}'");
- }
- return Ok(());
- }
+ if let Some((tool, mode)) = always_permission {
+ let params = outcome.params.as_ref();
+ Self::persist_always_permission(tool, mode, params, fs, cx);
+ return mode == ToolPermissionMode::Allow;
+ }
- // Handle "always deny pattern" - e.g., "always_deny_pattern:mcp:server:tool\n^cargo\s"
- if let Some(rest) = response_str.strip_prefix("always_deny_pattern:") {
- if let Some((pattern_tool_name, pattern)) = rest.split_once('\n') {
- let pattern_tool_name = pattern_tool_name.to_string();
- let pattern = pattern.to_string();
- if let Some(fs) = fs.clone() {
- cx.update(|cx| {
- update_settings_file(fs, cx, move |settings, _| {
- settings
- .agent
- .get_or_insert_default()
- .add_tool_deny_pattern(&pattern_tool_name, pattern);
- });
- });
- }
- } else {
- log::error!("Failed to parse always deny pattern: missing newline separator in '{rest}'");
- }
- return Err(anyhow!("Permission to run tool denied by user"));
- }
+ // Handle simple "allow" / "deny" (once, no persistence)
+ if option_id == "allow" || option_id == "deny" {
+ debug_assert!(
+ outcome.params.is_none(),
+ "unexpected params for once-only permission"
+ );
+ return option_id == "allow";
+ }
- // Handle simple "allow" (allow once)
- if response_str == "allow" {
- return Ok(());
- }
+ debug_assert!(false, "unexpected permission option_id: {option_id}");
+ false
+ }
- // Handle simple "deny" (deny once)
- Err(anyhow!("Permission to run tool denied by user"))
- })
+ /// Persists an "always allow" or "always deny" permission, using sub_patterns
+ /// from params when present.
+ fn persist_always_permission(
+ tool: &str,
+ mode: ToolPermissionMode,
+ params: Option<&acp_thread::SelectedPermissionParams>,
+ fs: Option<Arc<dyn Fs>>,
+ cx: &AsyncApp,
+ ) {
+ let Some(fs) = fs else {
+ return;
+ };
+
+ match params {
+ Some(acp_thread::SelectedPermissionParams::Terminal {
+ patterns: sub_patterns,
+ }) => {
+ debug_assert!(
+ !sub_patterns.is_empty(),
+ "empty sub_patterns for tool {tool} — callers should pass None instead"
+ );
+ let tool = tool.to_string();
+ let sub_patterns = sub_patterns.clone();
+ cx.update(|cx| {
+ update_settings_file(fs, cx, move |settings, _| {
+ let agent = settings.agent.get_or_insert_default();
+ for pattern in sub_patterns {
+ match mode {
+ ToolPermissionMode::Allow => {
+ agent.add_tool_allow_pattern(&tool, pattern);
+ }
+ ToolPermissionMode::Deny => {
+ agent.add_tool_deny_pattern(&tool, pattern);
+ }
+ // Confirm is already the fallback when no
+ // pattern matches, so there is nothing to
+ // persist for this mode.
+ ToolPermissionMode::Confirm => (),
+ }
+ }
+ });
+ });
+ }
+ None => {
+ let tool = tool.to_string();
+ cx.update(|cx| {
+ update_settings_file(fs, cx, move |settings, _| {
+ settings
+ .agent
+ .get_or_insert_default()
+ .set_tool_default_permission(&tool, mode);
+ });
+ });
+ }
+ }
}
}
@@ -3851,6 +3913,15 @@ impl ToolCallEventStreamReceiver {
panic!("Expected terminal but got: {:?}", event);
}
}
+
+ pub async fn expect_plan(&mut self) -> acp::Plan {
+ let event = self.0.next().await;
+ if let Some(Ok(ThreadEvent::Plan(plan))) = event {
+ plan
+ } else {
+ panic!("Expected plan but got: {:?}", event);
+ }
+ }
}
#[cfg(any(test, feature = "test-support"))]
@@ -19,6 +19,7 @@ mod streaming_edit_file_tool;
mod terminal_tool;
mod tool_edit_parser;
mod tool_permissions;
+mod update_plan_tool;
mod web_search_tool;
use crate::AgentTool;
@@ -44,6 +45,7 @@ pub use spawn_agent_tool::*;
pub use streaming_edit_file_tool::*;
pub use terminal_tool::*;
pub use tool_permissions::*;
+pub use update_plan_tool::*;
pub use web_search_tool::*;
macro_rules! tools {
@@ -132,5 +134,6 @@ tools! {
SaveFileTool,
SpawnAgentTool,
TerminalTool,
+ UpdatePlanTool,
WebSearchTool,
}
@@ -266,7 +266,7 @@ mod tests {
);
auth.response
- .send(acp::PermissionOptionId::new("allow"))
+ .send(acp::PermissionOptionId::new("allow").into())
.unwrap();
let result = task.await;
@@ -372,7 +372,7 @@ mod tests {
);
auth.response
- .send(acp::PermissionOptionId::new("allow"))
+ .send(acp::PermissionOptionId::new("allow").into())
.unwrap();
assert!(
@@ -241,7 +241,7 @@ mod tests {
);
auth.response
- .send(acp::PermissionOptionId::new("allow"))
+ .send(acp::PermissionOptionId::new("allow").into())
.unwrap();
let result = task.await;
@@ -359,7 +359,7 @@ mod tests {
);
auth.response
- .send(acp::PermissionOptionId::new("allow"))
+ .send(acp::PermissionOptionId::new("allow").into())
.unwrap();
assert!(
@@ -301,7 +301,7 @@ mod tests {
);
auth.response
- .send(acp::PermissionOptionId::new("allow"))
+ .send(acp::PermissionOptionId::new("allow").into())
.unwrap();
let result = task.await;
@@ -428,7 +428,7 @@ mod tests {
);
auth.response
- .send(acp::PermissionOptionId::new("allow"))
+ .send(acp::PermissionOptionId::new("allow").into())
.unwrap();
assert!(
@@ -1374,7 +1374,7 @@ mod tests {
event
.response
- .send(acp::PermissionOptionId::new("allow"))
+ .send(acp::PermissionOptionId::new("allow").into())
.unwrap();
authorize_task.await.unwrap();
}
@@ -848,7 +848,7 @@ mod tests {
);
auth.response
- .send(acp::PermissionOptionId::new("allow"))
+ .send(acp::PermissionOptionId::new("allow").into())
.unwrap();
let result = task.await;
@@ -273,7 +273,7 @@ mod tests {
);
auth.response
- .send(acp::PermissionOptionId::new("allow"))
+ .send(acp::PermissionOptionId::new("allow").into())
.unwrap();
let result = task.await;
@@ -379,7 +379,7 @@ mod tests {
);
auth.response
- .send(acp::PermissionOptionId::new("allow"))
+ .send(acp::PermissionOptionId::new("allow").into())
.unwrap();
assert!(
@@ -896,7 +896,7 @@ mod test {
);
authorization
.response
- .send(acp::PermissionOptionId::new("allow"))
+ .send(acp::PermissionOptionId::new("allow").into())
.unwrap();
let result = read_task.await;
@@ -1185,7 +1185,7 @@ mod test {
);
auth.response
- .send(acp::PermissionOptionId::new("allow"))
+ .send(acp::PermissionOptionId::new("allow").into())
.unwrap();
let result = task.await;
@@ -523,7 +523,7 @@ mod tests {
);
auth.response
- .send(acp::PermissionOptionId::new("allow"))
+ .send(acp::PermissionOptionId::new("allow").into())
.unwrap();
let _result = task.await;
@@ -651,7 +651,7 @@ mod tests {
);
auth.response
- .send(acp::PermissionOptionId::new("allow"))
+ .send(acp::PermissionOptionId::new("allow").into())
.unwrap();
assert!(
@@ -518,7 +518,7 @@ mod tests {
);
auth.response
- .send(acp::PermissionOptionId::new("allow"))
+ .send(acp::PermissionOptionId::new("allow").into())
.unwrap();
let _result = task.await;
@@ -646,7 +646,7 @@ mod tests {
);
auth.response
- .send(acp::PermissionOptionId::new("allow"))
+ .send(acp::PermissionOptionId::new("allow").into())
.unwrap();
assert!(
@@ -727,7 +727,7 @@ mod tests {
let auth = event_rx.expect_authorization().await;
auth.response
- .send(acp::PermissionOptionId::new("deny"))
+ .send(acp::PermissionOptionId::new("deny").into())
.unwrap();
let output = task.await.unwrap();
@@ -10,21 +10,30 @@ use std::sync::Arc;
use crate::{AgentTool, ThreadEnvironment, ToolCallEventStream, ToolInput};
-/// Spawns an agent to perform a delegated task.
+/// Spawn a sub-agent for a well-scoped task.
///
-/// Use this tool when you want to:
-/// - Run multiple tasks in parallel.
-/// - Delegate a self-contained task where you only need the final outcome.
+/// ### Designing delegated subtasks
+/// - An agent does not see your conversation history. Include all relevant context (file paths, requirements, constraints) in the message.
+/// - Subtasks must be concrete, well-defined, and self-contained.
+/// - Delegated subtasks must materially advance the main task.
+/// - Do not duplicate effort between your own work and delegated subtasks.
+/// - Do not use this tool for tasks you could accomplish directly with one or two tool calls.
+/// - When you delegate work, focus on coordinating and synthesizing results instead of duplicating the same work yourself.
+/// - Avoid issuing multiple delegate calls for the same unresolved subproblem unless the new delegated task is genuinely different and necessary.
+/// - Narrow the delegated ask to the concrete output you need next.
+/// - For code-edit subtasks, decompose work so each delegated task has a disjoint write set.
+/// - When sending a follow-up using an existing agent session_id, the agent already has the context from the previous turn. Send only a short, direct message. Do NOT repeat the original task or context.
///
-/// Do NOT use this tool for tasks you could accomplish directly with one or two tool calls (e.g. reading a file, running a single command).
+/// ### Parallel delegation patterns
+/// - Run multiple independent information-seeking subtasks in parallel when you have distinct questions that can be answered independently.
+/// - Split implementation into disjoint codebase slices and spawn multiple agents for them in parallel when the write scopes do not overlap.
+/// - When a plan has multiple independent steps, prefer delegating those steps in parallel rather than serializing them unnecessarily.
+/// - Reuse the returned session_id when you want to follow up on the same delegated subproblem instead of creating a duplicate session.
///
-/// You will receive only the agent's final message as output.
-///
-/// **New session** (no session_id): Creates a new agent that does NOT see your conversation history. Include all relevant context (file paths, requirements, constraints) in the message.
-///
-/// **Follow-up** (with session_id): Sends a follow-up to an existing agent session. The agent already has full context, so send only a short, direct message — do NOT repeat the original task or context. Examples: "Also update the tests", "Fix the compile error in foo.rs", "Retry".
-///
-/// - If spawning multiple agents that might write to the filesystem, provide guidance on how to avoid conflicts (e.g. assign each to different directories).
+/// ### Output
+/// - You will receive only the agent's final message as output.
+/// - Successful calls return a session_id that you can use for follow-up messages.
+/// - Error results may also include a session_id if a session was already created.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub struct SpawnAgentToolInput {
@@ -2581,7 +2581,7 @@ mod tests {
event
.response
- .send(acp::PermissionOptionId::new("allow"))
+ .send(acp::PermissionOptionId::new("allow").into())
.unwrap();
authorize_task.await.unwrap();
}
@@ -0,0 +1,290 @@
+use crate::{AgentTool, ToolCallEventStream, ToolInput};
+use agent_client_protocol as acp;
+use gpui::{App, SharedString, Task};
+use schemars::JsonSchema;
+use serde::{Deserialize, Serialize};
+use std::sync::Arc;
+
+#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+#[serde(rename_all = "snake_case")]
+#[schemars(inline)]
+pub enum PlanEntryStatus {
+ /// The task has not started yet.
+ Pending,
+ /// The task is currently being worked on.
+ InProgress,
+ /// The task has been successfully completed.
+ Completed,
+}
+
+impl From<PlanEntryStatus> for acp::PlanEntryStatus {
+ fn from(value: PlanEntryStatus) -> Self {
+ match value {
+ PlanEntryStatus::Pending => acp::PlanEntryStatus::Pending,
+ PlanEntryStatus::InProgress => acp::PlanEntryStatus::InProgress,
+ PlanEntryStatus::Completed => acp::PlanEntryStatus::Completed,
+ }
+ }
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, PartialEq, Eq, Default)]
+#[serde(rename_all = "snake_case")]
+#[schemars(inline)]
+pub enum PlanEntryPriority {
+ High,
+ #[default]
+ Medium,
+ Low,
+}
+
+impl From<PlanEntryPriority> for acp::PlanEntryPriority {
+ fn from(value: PlanEntryPriority) -> Self {
+ match value {
+ PlanEntryPriority::High => acp::PlanEntryPriority::High,
+ PlanEntryPriority::Medium => acp::PlanEntryPriority::Medium,
+ PlanEntryPriority::Low => acp::PlanEntryPriority::Low,
+ }
+ }
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+pub struct PlanItem {
+ /// Human-readable description of what this task aims to accomplish.
+ pub step: String,
+ /// The current status of this task.
+ pub status: PlanEntryStatus,
+ /// The relative importance of this task. Defaults to medium when omitted.
+ #[serde(default)]
+ pub priority: PlanEntryPriority,
+}
+
+impl From<PlanItem> for acp::PlanEntry {
+ fn from(value: PlanItem) -> Self {
+ acp::PlanEntry::new(value.step, value.priority.into(), value.status.into())
+ }
+}
+
+/// Replaces the agent's current task plan with the provided entries.
+/// Provide the full list of plan entries, each with a step, a status, and an optional priority.
+#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+pub struct UpdatePlanToolInput {
+ /// The list of plan entries and their current statuses.
+ pub plan: Vec<PlanItem>,
+}
+
+pub struct UpdatePlanTool;
+
+impl UpdatePlanTool {
+ fn to_plan(input: UpdatePlanToolInput) -> acp::Plan {
+ acp::Plan::new(input.plan.into_iter().map(Into::into).collect())
+ }
+}
+
+impl AgentTool for UpdatePlanTool {
+ type Input = UpdatePlanToolInput;
+ type Output = String;
+
+ const NAME: &'static str = "update_plan";
+
+ fn kind() -> acp::ToolKind {
+ acp::ToolKind::Think
+ }
+
+ fn initial_title(
+ &self,
+ input: Result<Self::Input, serde_json::Value>,
+ _cx: &mut App,
+ ) -> SharedString {
+ match input {
+ Ok(input) if input.plan.is_empty() => "Clear plan".into(),
+ Ok(_) | Err(_) => "Update plan".into(),
+ }
+ }
+
+ fn run(
+ self: Arc<Self>,
+ input: ToolInput<Self::Input>,
+ event_stream: ToolCallEventStream,
+ cx: &mut App,
+ ) -> Task<Result<Self::Output, Self::Output>> {
+ cx.spawn(async move |_cx| {
+ let input = input
+ .recv()
+ .await
+ .map_err(|e| format!("Failed to receive tool input: {e}"))?;
+
+ event_stream.update_plan(Self::to_plan(input));
+
+ Ok("Plan updated".to_string())
+ })
+ }
+
+ fn replay(
+ &self,
+ input: Self::Input,
+ _output: Self::Output,
+ event_stream: ToolCallEventStream,
+ _cx: &mut App,
+ ) -> anyhow::Result<()> {
+ event_stream.update_plan(Self::to_plan(input));
+ Ok(())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::ToolCallEventStream;
+ use gpui::TestAppContext;
+ use pretty_assertions::assert_eq;
+
+ fn sample_input() -> UpdatePlanToolInput {
+ UpdatePlanToolInput {
+ plan: vec![
+ PlanItem {
+ step: "Inspect the existing tool wiring".to_string(),
+ status: PlanEntryStatus::Completed,
+ priority: PlanEntryPriority::High,
+ },
+ PlanItem {
+ step: "Implement the update_plan tool".to_string(),
+ status: PlanEntryStatus::InProgress,
+ priority: PlanEntryPriority::Medium,
+ },
+ PlanItem {
+ step: "Add tests".to_string(),
+ status: PlanEntryStatus::Pending,
+ priority: PlanEntryPriority::Low,
+ },
+ ],
+ }
+ }
+
+ #[gpui::test]
+ async fn test_run_emits_plan_event(cx: &mut TestAppContext) {
+ let tool = Arc::new(UpdatePlanTool);
+ let (event_stream, mut event_rx) = ToolCallEventStream::test();
+
+ let input = sample_input();
+ let result = cx
+ .update(|cx| tool.run(ToolInput::resolved(input.clone()), event_stream, cx))
+ .await
+ .expect("tool should succeed");
+
+ assert_eq!(result, "Plan updated".to_string());
+
+ let plan = event_rx.expect_plan().await;
+ assert_eq!(
+ plan,
+ acp::Plan::new(vec![
+ acp::PlanEntry::new(
+ "Inspect the existing tool wiring",
+ acp::PlanEntryPriority::High,
+ acp::PlanEntryStatus::Completed,
+ ),
+ acp::PlanEntry::new(
+ "Implement the update_plan tool",
+ acp::PlanEntryPriority::Medium,
+ acp::PlanEntryStatus::InProgress,
+ ),
+ acp::PlanEntry::new(
+ "Add tests",
+ acp::PlanEntryPriority::Low,
+ acp::PlanEntryStatus::Pending,
+ ),
+ ])
+ );
+ }
+
+ #[gpui::test]
+ async fn test_replay_emits_plan_event(cx: &mut TestAppContext) {
+ let tool = UpdatePlanTool;
+ let (event_stream, mut event_rx) = ToolCallEventStream::test();
+
+ let input = sample_input();
+
+ cx.update(|cx| {
+ tool.replay(input.clone(), "Plan updated".to_string(), event_stream, cx)
+ .expect("replay should succeed");
+ });
+
+ let plan = event_rx.expect_plan().await;
+ assert_eq!(
+ plan,
+ acp::Plan::new(vec![
+ acp::PlanEntry::new(
+ "Inspect the existing tool wiring",
+ acp::PlanEntryPriority::High,
+ acp::PlanEntryStatus::Completed,
+ ),
+ acp::PlanEntry::new(
+ "Implement the update_plan tool",
+ acp::PlanEntryPriority::Medium,
+ acp::PlanEntryStatus::InProgress,
+ ),
+ acp::PlanEntry::new(
+ "Add tests",
+ acp::PlanEntryPriority::Low,
+ acp::PlanEntryStatus::Pending,
+ ),
+ ])
+ );
+ }
+
+ #[gpui::test]
+ async fn test_run_defaults_priority_to_medium(cx: &mut TestAppContext) {
+ let tool = Arc::new(UpdatePlanTool);
+ let (event_stream, mut event_rx) = ToolCallEventStream::test();
+
+ let input = UpdatePlanToolInput {
+ plan: vec![
+ PlanItem {
+ step: "First".to_string(),
+ status: PlanEntryStatus::InProgress,
+ priority: PlanEntryPriority::default(),
+ },
+ PlanItem {
+ step: "Second".to_string(),
+ status: PlanEntryStatus::InProgress,
+ priority: PlanEntryPriority::default(),
+ },
+ ],
+ };
+
+ let result = cx
+ .update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx))
+ .await
+ .expect("tool should succeed");
+
+ assert_eq!(result, "Plan updated".to_string());
+
+ let plan = event_rx.expect_plan().await;
+ assert_eq!(
+ plan,
+ acp::Plan::new(vec![
+ acp::PlanEntry::new(
+ "First",
+ acp::PlanEntryPriority::Medium,
+ acp::PlanEntryStatus::InProgress,
+ ),
+ acp::PlanEntry::new(
+ "Second",
+ acp::PlanEntryPriority::Medium,
+ acp::PlanEntryStatus::InProgress,
+ ),
+ ])
+ );
+ }
+
+ #[gpui::test]
+ async fn test_initial_title(cx: &mut TestAppContext) {
+ let tool = UpdatePlanTool;
+
+ let title = cx.update(|cx| tool.initial_title(Ok(sample_input()), cx));
+ assert_eq!(title, SharedString::from("Update plan"));
+
+ let title =
+ cx.update(|cx| tool.initial_title(Ok(UpdatePlanToolInput { plan: Vec::new() }), cx));
+ assert_eq!(title, SharedString::from("Clear plan"));
+ }
+}
@@ -30,6 +30,7 @@ env_logger = { workspace = true, optional = true }
fs.workspace = true
futures.workspace = true
gpui.workspace = true
+feature_flags.workspace = true
gpui_tokio = { workspace = true, optional = true }
credentials_provider.workspace = true
google_ai.workspace = true
@@ -7,13 +7,14 @@ use action_log::ActionLog;
use agent_client_protocol::{self as acp, Agent as _, ErrorCode};
use anyhow::anyhow;
use collections::HashMap;
+use feature_flags::{AcpBetaFeatureFlag, FeatureFlagAppExt as _};
use futures::AsyncBufReadExt as _;
use futures::io::BufReader;
use project::agent_server_store::AgentServerCommand;
use project::{AgentId, Project};
use serde::Deserialize;
use settings::Settings as _;
-use task::ShellBuilder;
+use task::{ShellBuilder, SpawnInTerminal};
use util::ResultExt as _;
use util::path_list::PathList;
use util::process::Child;
@@ -33,6 +34,8 @@ use terminal::terminal_settings::{AlternateScroll, CursorShape, TerminalSettings
use crate::GEMINI_ID;
+pub const GEMINI_TERMINAL_AUTH_METHOD_ID: &str = "spawn-gemini-cli";
+
#[derive(Debug, Error)]
#[error("Unsupported version")]
pub struct UnsupportedVersion;
@@ -44,6 +47,7 @@ pub struct AcpConnection {
connection: Rc<acp::ClientSideConnection>,
sessions: Rc<RefCell<HashMap<acp::SessionId, AcpSession>>>,
auth_methods: Vec<acp::AuthMethod>,
+ command: AgentServerCommand,
agent_capabilities: acp::AgentCapabilities,
default_mode: Option<acp::SessionModeId>,
default_model: Option<acp::ModelId>,
@@ -162,6 +166,7 @@ impl AgentSessionList for AcpSessionList {
pub async fn connect(
agent_id: AgentId,
+ project: Entity<Project>,
display_name: SharedString,
command: AgentServerCommand,
default_mode: Option<acp::SessionModeId>,
@@ -171,6 +176,7 @@ pub async fn connect(
) -> Result<Rc<dyn AgentConnection>> {
let conn = AcpConnection::stdio(
agent_id,
+ project,
display_name,
command.clone(),
default_mode,
@@ -187,6 +193,7 @@ const MINIMUM_SUPPORTED_VERSION: acp::ProtocolVersion = acp::ProtocolVersion::V1
impl AcpConnection {
pub async fn stdio(
agent_id: AgentId,
+ project: Entity<Project>,
display_name: SharedString,
command: AgentServerCommand,
default_mode: Option<acp::SessionModeId>,
@@ -199,6 +206,15 @@ impl AcpConnection {
let mut child =
builder.build_std_command(Some(command.path.display().to_string()), &command.args);
child.envs(command.env.iter().flatten());
+ if let Some(cwd) = project.update(cx, |project, cx| {
+ project
+ .default_path_list(cx)
+ .ordered_paths()
+ .next()
+ .cloned()
+ }) {
+ child.current_dir(cwd);
+ }
let mut child = Child::spawn(child, Stdio::piped(), Stdio::piped(), Stdio::piped())?;
let stdout = child.stdout.take().context("Failed to take stdout")?;
@@ -286,6 +302,7 @@ impl AcpConnection {
.read_text_file(true)
.write_text_file(true))
.terminal(true)
+ .auth(acp::AuthCapabilities::new().terminal(true))
// Experimental: Allow for rendering terminal output from the agents
.meta(acp::Meta::from_iter([
("terminal_output".into(), true.into()),
@@ -335,7 +352,7 @@ impl AcpConnection {
});
let meta = acp::Meta::from_iter([("terminal-auth".to_string(), value)]);
vec![acp::AuthMethod::Agent(
- acp::AuthMethodAgent::new("spawn-gemini-cli", "Login")
+ acp::AuthMethodAgent::new(GEMINI_TERMINAL_AUTH_METHOD_ID, "Login")
.description("Login with your Google or Vertex AI account")
.meta(meta),
)]
@@ -345,6 +362,7 @@ impl AcpConnection {
Ok(Self {
id: agent_id,
auth_methods,
+ command,
connection,
display_name,
telemetry_id,
@@ -468,6 +486,64 @@ impl Drop for AcpConnection {
}
}
+fn terminal_auth_task_id(agent_id: &AgentId, method_id: &acp::AuthMethodId) -> String {
+ format!("external-agent-{}-{}-login", agent_id.0, method_id.0)
+}
+
+fn terminal_auth_task(
+ command: &AgentServerCommand,
+ agent_id: &AgentId,
+ method: &acp::AuthMethodTerminal,
+) -> SpawnInTerminal {
+ let mut args = command.args.clone();
+ args.extend(method.args.clone());
+
+ let mut env = command.env.clone().unwrap_or_default();
+ env.extend(method.env.clone());
+
+ acp_thread::build_terminal_auth_task(
+ terminal_auth_task_id(agent_id, &method.id),
+ method.name.clone(),
+ command.path.to_string_lossy().into_owned(),
+ args,
+ env,
+ )
+}
+
+/// Used to support the _meta method prior to stabilization
+fn meta_terminal_auth_task(
+ agent_id: &AgentId,
+ method_id: &acp::AuthMethodId,
+ method: &acp::AuthMethod,
+) -> Option<SpawnInTerminal> {
+ #[derive(Deserialize)]
+ struct MetaTerminalAuth {
+ label: String,
+ command: String,
+ #[serde(default)]
+ args: Vec<String>,
+ #[serde(default)]
+ env: HashMap<String, String>,
+ }
+
+ let meta = match method {
+ acp::AuthMethod::EnvVar(env_var) => env_var.meta.as_ref(),
+ acp::AuthMethod::Terminal(terminal) => terminal.meta.as_ref(),
+ acp::AuthMethod::Agent(agent) => agent.meta.as_ref(),
+ _ => None,
+ }?;
+ let terminal_auth =
+ serde_json::from_value::<MetaTerminalAuth>(meta.get("terminal-auth")?.clone()).ok()?;
+
+ Some(acp_thread::build_terminal_auth_task(
+ terminal_auth_task_id(agent_id, method_id),
+ terminal_auth.label.clone(),
+ terminal_auth.command,
+ terminal_auth.args,
+ terminal_auth.env,
+ ))
+}
+
impl AgentConnection for AcpConnection {
fn agent_id(&self) -> AgentId {
self.id.clone()
@@ -813,6 +889,24 @@ impl AgentConnection for AcpConnection {
&self.auth_methods
}
+ fn terminal_auth_task(
+ &self,
+ method_id: &acp::AuthMethodId,
+ cx: &App,
+ ) -> Option<SpawnInTerminal> {
+ let method = self
+ .auth_methods
+ .iter()
+ .find(|method| method.id() == method_id)?;
+
+ match method {
+ acp::AuthMethod::Terminal(terminal) if cx.has_flag::<AcpBetaFeatureFlag>() => {
+ Some(terminal_auth_task(&self.command, &self.id, terminal))
+ }
+ _ => meta_terminal_auth_task(&self.id, method_id, method),
+ }
+ }
+
fn authenticate(&self, method_id: acp::AuthMethodId, cx: &mut App) -> Task<Result<()>> {
let conn = self.connection.clone();
cx.foreground_executor().spawn(async move {
@@ -979,6 +1073,149 @@ fn map_acp_error(err: acp::Error) -> anyhow::Error {
}
}
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn terminal_auth_task_reuses_command_and_merges_args_and_env() {
+ let command = AgentServerCommand {
+ path: "/path/to/agent".into(),
+ args: vec!["--acp".into(), "--verbose".into()],
+ env: Some(HashMap::from_iter([
+ ("BASE".into(), "1".into()),
+ ("SHARED".into(), "base".into()),
+ ])),
+ };
+ let method = acp::AuthMethodTerminal::new("login", "Login")
+ .args(vec!["/auth".into()])
+ .env(std::collections::HashMap::from_iter([
+ ("EXTRA".into(), "2".into()),
+ ("SHARED".into(), "override".into()),
+ ]));
+
+ let terminal_auth_task = terminal_auth_task(&command, &AgentId::new("test-agent"), &method);
+
+ assert_eq!(
+ terminal_auth_task.command.as_deref(),
+ Some("/path/to/agent")
+ );
+ assert_eq!(terminal_auth_task.args, vec!["--acp", "--verbose", "/auth"]);
+ assert_eq!(
+ terminal_auth_task.env,
+ HashMap::from_iter([
+ ("BASE".into(), "1".into()),
+ ("SHARED".into(), "override".into()),
+ ("EXTRA".into(), "2".into()),
+ ])
+ );
+ assert_eq!(terminal_auth_task.label, "Login");
+ assert_eq!(terminal_auth_task.command_label, "Login");
+ }
+
+ #[test]
+ fn legacy_terminal_auth_task_parses_meta_and_retries_session() {
+ let method_id = acp::AuthMethodId::new("legacy-login");
+ let method = acp::AuthMethod::Agent(
+ acp::AuthMethodAgent::new(method_id.clone(), "Login").meta(acp::Meta::from_iter([(
+ "terminal-auth".to_string(),
+ serde_json::json!({
+ "label": "legacy /auth",
+ "command": "legacy-agent",
+ "args": ["auth", "--interactive"],
+ "env": {
+ "AUTH_MODE": "interactive",
+ },
+ }),
+ )])),
+ );
+
+ let terminal_auth_task =
+ meta_terminal_auth_task(&AgentId::new("test-agent"), &method_id, &method)
+ .expect("expected legacy terminal auth task");
+
+ assert_eq!(
+ terminal_auth_task.id.0,
+ "external-agent-test-agent-legacy-login-login"
+ );
+ assert_eq!(terminal_auth_task.command.as_deref(), Some("legacy-agent"));
+ assert_eq!(terminal_auth_task.args, vec!["auth", "--interactive"]);
+ assert_eq!(
+ terminal_auth_task.env,
+ HashMap::from_iter([("AUTH_MODE".into(), "interactive".into())])
+ );
+ assert_eq!(terminal_auth_task.label, "legacy /auth");
+ }
+
+ #[test]
+ fn legacy_terminal_auth_task_returns_none_for_invalid_meta() {
+ let method_id = acp::AuthMethodId::new("legacy-login");
+ let method = acp::AuthMethod::Agent(
+ acp::AuthMethodAgent::new(method_id.clone(), "Login").meta(acp::Meta::from_iter([(
+ "terminal-auth".to_string(),
+ serde_json::json!({
+ "label": "legacy /auth",
+ }),
+ )])),
+ );
+
+ assert!(
+ meta_terminal_auth_task(&AgentId::new("test-agent"), &method_id, &method).is_none()
+ );
+ }
+
+ #[test]
+ fn first_class_terminal_auth_takes_precedence_over_legacy_meta() {
+ let method_id = acp::AuthMethodId::new("login");
+ let method = acp::AuthMethod::Terminal(
+ acp::AuthMethodTerminal::new(method_id, "Login")
+ .args(vec!["/auth".into()])
+ .env(std::collections::HashMap::from_iter([(
+ "AUTH_MODE".into(),
+ "first-class".into(),
+ )]))
+ .meta(acp::Meta::from_iter([(
+ "terminal-auth".to_string(),
+ serde_json::json!({
+ "label": "legacy /auth",
+ "command": "legacy-agent",
+ "args": ["legacy-auth"],
+ "env": {
+ "AUTH_MODE": "legacy",
+ },
+ }),
+ )])),
+ );
+
+ let command = AgentServerCommand {
+ path: "/path/to/agent".into(),
+ args: vec!["--acp".into()],
+ env: Some(HashMap::from_iter([("BASE".into(), "1".into())])),
+ };
+
+ let terminal_auth_task = match &method {
+ acp::AuthMethod::Terminal(terminal) => {
+ terminal_auth_task(&command, &AgentId::new("test-agent"), terminal)
+ }
+ _ => unreachable!(),
+ };
+
+ assert_eq!(
+ terminal_auth_task.command.as_deref(),
+ Some("/path/to/agent")
+ );
+ assert_eq!(terminal_auth_task.args, vec!["--acp", "/auth"]);
+ assert_eq!(
+ terminal_auth_task.env,
+ HashMap::from_iter([
+ ("BASE".into(), "1".into()),
+ ("AUTH_MODE".into(), "first-class".into()),
+ ])
+ );
+ assert_eq!(terminal_auth_task.label, "Login");
+ }
+}
+
fn mcp_servers_for_project(project: &Entity<Project>, cx: &App) -> Vec<acp::McpServer> {
let context_server_store = project.read(cx).context_server_store().read(cx);
let is_local = project.read(cx).is_local();
@@ -1233,7 +1470,7 @@ impl acp::Client for ClientDelegate {
let outcome = task.await;
- Ok(acp::RequestPermissionResponse::new(outcome))
+ Ok(acp::RequestPermissionResponse::new(outcome.into()))
}
async fn write_text_file(
@@ -9,7 +9,7 @@ use collections::{HashMap, HashSet};
pub use custom::*;
use fs::Fs;
use http_client::read_no_proxy_from_env;
-use project::{AgentId, agent_server_store::AgentServerStore};
+use project::{AgentId, Project, agent_server_store::AgentServerStore};
use acp_thread::AgentConnection;
use anyhow::Result;
@@ -17,7 +17,7 @@ use gpui::{App, AppContext, Entity, Task};
use settings::SettingsStore;
use std::{any::Any, rc::Rc, sync::Arc};
-pub use acp::AcpConnection;
+pub use acp::{AcpConnection, GEMINI_TERMINAL_AUTH_METHOD_ID};
pub struct AgentServerDelegate {
store: Entity<AgentServerStore>,
@@ -42,6 +42,7 @@ pub trait AgentServer: Send {
fn connect(
&self,
delegate: AgentServerDelegate,
+ project: Entity<Project>,
cx: &mut App,
) -> Task<Result<Rc<dyn AgentConnection>>>;
@@ -5,9 +5,12 @@ use anyhow::{Context as _, Result};
use collections::HashSet;
use credentials_provider::CredentialsProvider;
use fs::Fs;
-use gpui::{App, AppContext as _, Task};
+use gpui::{App, AppContext as _, Entity, Task};
use language_model::{ApiKey, EnvVar};
-use project::agent_server_store::{AgentId, AllAgentServersSettings};
+use project::{
+ Project,
+ agent_server_store::{AgentId, AllAgentServersSettings},
+};
use settings::{SettingsStore, update_settings_file};
use std::{rc::Rc, sync::Arc};
use ui::IconName;
@@ -289,6 +292,7 @@ impl AgentServer for CustomAgentServer {
fn connect(
&self,
delegate: AgentServerDelegate,
+ project: Entity<Project>,
cx: &mut App,
) -> Task<Result<Rc<dyn AgentConnection>>> {
let agent_id = self.agent_id();
@@ -371,6 +375,7 @@ impl AgentServer for CustomAgentServer {
.await?;
let connection = crate::acp::connect(
agent_id,
+ project,
display_name,
command,
default_mode,
@@ -479,6 +484,7 @@ mod tests {
description: SharedString::from(""),
version: SharedString::from("1.0.0"),
repository: None,
+ website: None,
icon_path: None,
},
package: id,
@@ -208,7 +208,7 @@ pub async fn test_tool_call_with_permission<T, F>(
thread.update(cx, |thread, cx| {
thread.authorize_tool_call(
tool_call_id,
- allow_option_id,
+ allow_option_id.into(),
acp::PermissionOptionKind::AllowOnce,
cx,
);
@@ -434,7 +434,10 @@ pub async fn new_test_thread(
let store = project.read_with(cx, |project, _| project.agent_server_store().clone());
let delegate = AgentServerDelegate::new(store, None);
- let connection = cx.update(|cx| server.connect(delegate, cx)).await.unwrap();
+ let connection = cx
+ .update(|cx| server.connect(delegate, project.clone(), cx))
+ .await
+ .unwrap();
cx.update(|cx| {
connection.new_session(project.clone(), PathList::new(&[current_dir.as_ref()]), cx)
@@ -34,7 +34,7 @@ agent_servers.workspace = true
agent_settings.workspace = true
ai_onboarding.workspace = true
anyhow.workspace = true
-arrayvec.workspace = true
+heapless.workspace = true
assistant_text_thread.workspace = true
assistant_slash_command.workspace = true
assistant_slash_commands.workspace = true
@@ -160,7 +160,7 @@ impl AgentConnectionStore {
let agent_server_store = self.project.read(cx).agent_server_store().clone();
let delegate = AgentServerDelegate::new(agent_server_store, Some(new_version_tx));
- let connect_task = server.connect(delegate, cx);
+ let connect_task = server.connect(delegate, self.project.clone(), cx);
let connect_task = cx.spawn(async move |_this, cx| match connect_task.await {
Ok(connection) => cx.update(|cx| {
let history = connection
@@ -14,7 +14,7 @@ use agent::{ContextServerRegistry, SharedThread, ThreadStore};
use agent_client_protocol as acp;
use agent_servers::AgentServer;
use collections::HashSet;
-use db::kvp::{Dismissable, KEY_VALUE_STORE};
+use db::kvp::{Dismissable, KeyValueStore};
use itertools::Itertools;
use project::AgentId;
use serde::{Deserialize, Serialize};
@@ -60,7 +60,6 @@ use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer};
use extension::ExtensionEvents;
use extension_host::ExtensionStore;
use fs::Fs;
-use git::repository::validate_worktree_directory;
use gpui::{
Action, Animation, AnimationExt, AnyElement, App, AsyncWindowContext, ClipboardItem, Corner,
DismissEvent, Entity, EventEmitter, ExternalPaths, FocusHandle, Focusable, KeyContext, Pixels,
@@ -76,8 +75,8 @@ use search::{BufferSearchBar, buffer_search};
use settings::{Settings, update_settings_file};
use theme::ThemeSettings;
use ui::{
- Button, Callout, ContextMenu, ContextMenuEntry, DocumentationSide, KeyBinding, PopoverMenu,
- PopoverMenuHandle, SpinnerLabel, Tab, Tooltip, prelude::*, utils::WithRemSize,
+ Button, Callout, CommonAnimationExt, ContextMenu, ContextMenuEntry, DocumentationSide,
+ KeyBinding, PopoverMenu, PopoverMenuHandle, Tab, Tooltip, prelude::*, utils::WithRemSize,
};
use util::{ResultExt as _, debug_panic};
use workspace::{
@@ -95,8 +94,11 @@ const AGENT_PANEL_KEY: &str = "agent_panel";
const RECENTLY_UPDATED_MENU_LIMIT: usize = 6;
const DEFAULT_THREAD_TITLE: &str = "New Thread";
-fn read_serialized_panel(workspace_id: workspace::WorkspaceId) -> Option<SerializedAgentPanel> {
- let scope = KEY_VALUE_STORE.scoped(AGENT_PANEL_KEY);
+fn read_serialized_panel(
+ workspace_id: workspace::WorkspaceId,
+ kvp: &KeyValueStore,
+) -> Option<SerializedAgentPanel> {
+ let scope = kvp.scoped(AGENT_PANEL_KEY);
let key = i64::from(workspace_id).to_string();
scope
.read(&key)
@@ -108,8 +110,9 @@ fn read_serialized_panel(workspace_id: workspace::WorkspaceId) -> Option<Seriali
async fn save_serialized_panel(
workspace_id: workspace::WorkspaceId,
panel: SerializedAgentPanel,
+ kvp: KeyValueStore,
) -> Result<()> {
- let scope = KEY_VALUE_STORE.scoped(AGENT_PANEL_KEY);
+ let scope = kvp.scoped(AGENT_PANEL_KEY);
let key = i64::from(workspace_id).to_string();
scope.write(key, serde_json::to_string(&panel)?).await?;
Ok(())
@@ -117,9 +120,8 @@ async fn save_serialized_panel(
/// Migration: reads the original single-panel format stored under the
/// `"agent_panel"` KVP key before per-workspace keying was introduced.
-fn read_legacy_serialized_panel() -> Option<SerializedAgentPanel> {
- KEY_VALUE_STORE
- .read_kvp(AGENT_PANEL_KEY)
+fn read_legacy_serialized_panel(kvp: &KeyValueStore) -> Option<SerializedAgentPanel> {
+ kvp.read_kvp(AGENT_PANEL_KEY)
.log_err()
.flatten()
.and_then(|json| serde_json::from_str::<SerializedAgentPanel>(&json).log_err())
@@ -220,7 +222,7 @@ pub fn init(cx: &mut App) {
.register_action(|workspace, _: &OpenAgentDiff, window, cx| {
let thread = workspace
.panel::<AgentPanel>(cx)
- .and_then(|panel| panel.read(cx).active_conversation().cloned())
+ .and_then(|panel| panel.read(cx).active_conversation_view().cloned())
.and_then(|conversation| {
conversation
.read(cx)
@@ -402,17 +404,17 @@ pub fn init(cx: &mut App) {
});
},
)
- .register_action(|workspace, action: &StartThreadIn, _window, cx| {
+ .register_action(|workspace, action: &StartThreadIn, window, cx| {
if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
panel.update(cx, |panel, cx| {
- panel.set_start_thread_in(action, cx);
+ panel.set_start_thread_in(action, window, cx);
});
}
})
- .register_action(|workspace, _: &CycleStartThreadIn, _window, cx| {
+ .register_action(|workspace, _: &CycleStartThreadIn, window, cx| {
if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
panel.update(cx, |panel, cx| {
- panel.cycle_start_thread_in(cx);
+ panel.cycle_start_thread_in(window, cx);
});
}
});
@@ -782,6 +784,7 @@ impl AgentPanel {
}
});
+ let kvp = KeyValueStore::global(cx);
self.pending_serialization = Some(cx.background_spawn(async move {
save_serialized_panel(
workspace_id,
@@ -791,6 +794,7 @@ impl AgentPanel {
last_active_thread,
start_thread_in,
},
+ kvp,
)
.await?;
anyhow::Ok(())
@@ -803,6 +807,7 @@ impl AgentPanel {
mut cx: AsyncWindowContext,
) -> Task<Result<Entity<Self>>> {
let prompt_store = cx.update(|_window, cx| PromptStore::global(cx));
+ let kvp = cx.update(|_window, cx| KeyValueStore::global(cx)).ok();
cx.spawn(async move |cx| {
let prompt_store = match prompt_store {
Ok(prompt_store) => prompt_store.await.ok(),
@@ -815,9 +820,11 @@ impl AgentPanel {
let serialized_panel = cx
.background_spawn(async move {
- workspace_id
- .and_then(read_serialized_panel)
- .or_else(read_legacy_serialized_panel)
+ kvp.and_then(|kvp| {
+ workspace_id
+ .and_then(|id| read_serialized_panel(id, &kvp))
+ .or_else(|| read_legacy_serialized_panel(&kvp))
+ })
})
.await;
@@ -1089,7 +1096,7 @@ impl AgentPanel {
_worktree_creation_task: None,
show_trust_workspace_message: false,
last_configuration_error_telemetry: None,
- on_boarding_upsell_dismissed: AtomicBool::new(OnboardingUpsell::dismissed()),
+ on_boarding_upsell_dismissed: AtomicBool::new(OnboardingUpsell::dismissed(cx)),
_active_view_observation: None,
};
@@ -1181,18 +1188,6 @@ impl AgentPanel {
.unwrap_or(false)
}
- pub fn active_conversation(&self) -> Option<&Entity<ConversationView>> {
- match &self.active_view {
- ActiveView::AgentThread {
- conversation_view, ..
- } => Some(conversation_view),
- ActiveView::Uninitialized
- | ActiveView::TextThread { .. }
- | ActiveView::History { .. }
- | ActiveView::Configuration => None,
- }
- }
-
pub fn new_thread(&mut self, _action: &NewThread, window: &mut Window, cx: &mut Context<Self>) {
self.new_agent_thread(AgentType::NativeAgent, window, cx);
}
@@ -1308,16 +1303,17 @@ impl AgentPanel {
}
let thread_store = self.thread_store.clone();
+ let kvp = KeyValueStore::global(cx);
if let Some(agent) = agent_choice {
cx.background_spawn({
let agent = agent.clone();
+ let kvp = kvp;
async move {
if let Some(serialized) =
serde_json::to_string(&LastUsedExternalAgent { agent }).log_err()
{
- KEY_VALUE_STORE
- .write_kvp(LAST_USED_EXTERNAL_AGENT_KEY.to_string(), serialized)
+ kvp.write_kvp(LAST_USED_EXTERNAL_AGENT_KEY.to_string(), serialized)
.await
.log_err();
}
@@ -1344,17 +1340,15 @@ impl AgentPanel {
let ext_agent = if is_via_collab {
Agent::NativeAgent
} else {
- cx.background_spawn(async move {
- KEY_VALUE_STORE.read_kvp(LAST_USED_EXTERNAL_AGENT_KEY)
- })
- .await
- .log_err()
- .flatten()
- .and_then(|value| {
- serde_json::from_str::<LastUsedExternalAgent>(&value).log_err()
- })
- .map(|agent| agent.agent)
- .unwrap_or(Agent::NativeAgent)
+ cx.background_spawn(async move { kvp.read_kvp(LAST_USED_EXTERNAL_AGENT_KEY) })
+ .await
+ .log_err()
+ .flatten()
+ .and_then(|value| {
+ serde_json::from_str::<LastUsedExternalAgent>(&value).log_err()
+ })
+ .map(|agent| agent.agent)
+ .unwrap_or(Agent::NativeAgent)
};
let server = ext_agent.server(fs, thread_store);
@@ -1405,7 +1399,7 @@ impl AgentPanel {
}
fn expand_message_editor(&mut self, window: &mut Window, cx: &mut Context<Self>) {
- let Some(conversation_view) = self.active_conversation() else {
+ let Some(conversation_view) = self.active_conversation_view() else {
return;
};
@@ -1731,7 +1725,7 @@ impl AgentPanel {
cx: &mut Context<Self>,
) {
if let Some(workspace) = self.workspace.upgrade()
- && let Some(conversation_view) = self.active_conversation()
+ && let Some(conversation_view) = self.active_conversation_view()
&& let Some(active_thread) = conversation_view.read(cx).active_thread().cloned()
{
active_thread.update(cx, |thread, cx| {
@@ -1972,13 +1966,13 @@ impl AgentPanel {
let mut views = Vec::new();
if let Some(server_view) = self.active_conversation_view() {
- if let Some(thread_view) = server_view.read(cx).parent_thread(cx) {
+ if let Some(thread_view) = server_view.read(cx).root_thread(cx) {
views.push(thread_view);
}
}
for server_view in self.background_threads.values() {
- if let Some(thread_view) = server_view.read(cx).parent_thread(cx) {
+ if let Some(thread_view) = server_view.read(cx).root_thread(cx) {
views.push(thread_view);
}
}
@@ -1991,22 +1985,46 @@ impl AgentPanel {
return;
};
- let Some(thread_view) = conversation_view.read(cx).parent_thread(cx) else {
+ let Some(thread_view) = conversation_view.read(cx).root_thread(cx) else {
return;
};
- let thread = &thread_view.read(cx).thread;
- let (status, session_id) = {
- let thread = thread.read(cx);
- (thread.status(), thread.session_id().clone())
- };
+ self.background_threads
+ .insert(thread_view.read(cx).id.clone(), conversation_view);
+ self.cleanup_background_threads(cx);
+ }
- if status != ThreadStatus::Generating {
- return;
- }
+ /// We keep threads that are:
+ /// - Still running
+ /// - Do not support reloading the full session
+ /// - Have had the most recent events (up to 5 idle threads)
+ fn cleanup_background_threads(&mut self, cx: &App) {
+ let mut potential_removals = self
+ .background_threads
+ .iter()
+ .filter(|(_id, view)| {
+ let Some(thread_view) = view.read(cx).root_thread(cx) else {
+ return true;
+ };
+ let thread = thread_view.read(cx).thread.read(cx);
+ thread.connection().supports_load_session() && thread.status() == ThreadStatus::Idle
+ })
+ .collect::<Vec<_>>();
- self.background_threads
- .insert(session_id, conversation_view);
+ const MAX_IDLE_BACKGROUND_THREADS: usize = 5;
+
+ potential_removals.sort_unstable_by_key(|(_, view)| view.read(cx).updated_at(cx));
+ let n = potential_removals
+ .len()
+ .saturating_sub(MAX_IDLE_BACKGROUND_THREADS);
+ let to_remove = potential_removals
+ .into_iter()
+ .map(|(id, _)| id.clone())
+ .take(n)
+ .collect::<Vec<_>>();
+ for id in to_remove {
+ self.background_threads.remove(&id);
+ }
}
pub(crate) fn active_native_agent_thread(&self, cx: &App) -> Option<Entity<agent::Thread>> {
@@ -2233,7 +2251,12 @@ impl AgentPanel {
&self.start_thread_in
}
- fn set_start_thread_in(&mut self, action: &StartThreadIn, cx: &mut Context<Self>) {
+ fn set_start_thread_in(
+ &mut self,
+ action: &StartThreadIn,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
if matches!(action, StartThreadIn::NewWorktree) && !cx.has_flag::<AgentV2FeatureFlag>() {
return;
}
@@ -2255,16 +2278,19 @@ impl AgentPanel {
}
};
self.start_thread_in = new_target;
+ if let Some(thread) = self.active_thread_view(cx) {
+ thread.update(cx, |thread, cx| thread.focus_handle(cx).focus(window, cx));
+ }
self.serialize(cx);
cx.notify();
}
- fn cycle_start_thread_in(&mut self, cx: &mut Context<Self>) {
+ fn cycle_start_thread_in(&mut self, window: &mut Window, cx: &mut Context<Self>) {
let next = match self.start_thread_in {
StartThreadIn::LocalProject => StartThreadIn::NewWorktree,
StartThreadIn::NewWorktree => StartThreadIn::LocalProject,
};
- self.set_start_thread_in(&next, cx);
+ self.set_start_thread_in(&next, window, cx);
}
fn reset_start_thread_in_to_default(&mut self, cx: &mut Context<Self>) {
@@ -2272,7 +2298,13 @@ impl AgentPanel {
let default = AgentSettings::get_global(cx).new_thread_location;
let start_thread_in = match default {
NewThreadLocation::LocalProject => StartThreadIn::LocalProject,
- NewThreadLocation::NewWorktree => StartThreadIn::NewWorktree,
+ NewThreadLocation::NewWorktree => {
+ if self.project_has_git_repository(cx) {
+ StartThreadIn::NewWorktree
+ } else {
+ StartThreadIn::LocalProject
+ }
+ }
};
if self.start_thread_in != start_thread_in {
self.start_thread_in = start_thread_in;
@@ -2505,6 +2537,10 @@ impl AgentPanel {
.is_some_and(|thread| !thread.read(cx).entries().is_empty())
}
+ pub fn active_thread_is_draft(&self, cx: &App) -> bool {
+ self.active_conversation_view().is_some() && !self.active_thread_has_messages(cx)
+ }
+
fn handle_first_send_requested(
&mut self,
thread_view: Entity<ThreadView>,
@@ -2606,11 +2642,10 @@ impl AgentPanel {
for repo in git_repos {
let (work_dir, new_path, receiver) = repo.update(cx, |repo, _cx| {
- let original_repo = repo.original_repo_abs_path.clone();
- let directory =
- validate_worktree_directory(&original_repo, worktree_directory_setting)?;
- let new_path = directory.join(branch_name);
- let receiver = repo.create_worktree(branch_name.to_string(), directory, None);
+ let new_path =
+ repo.path_for_new_linked_worktree(branch_name, worktree_directory_setting)?;
+ let receiver =
+ repo.create_worktree(branch_name.to_string(), new_path.clone(), None);
let work_dir = repo.work_directory_abs_path.clone();
anyhow::Ok((work_dir, new_path, receiver))
})?;
@@ -3021,8 +3056,17 @@ impl AgentPanel {
multi_workspace.activate(new_workspace.clone(), cx);
})?;
- this.update_in(cx, |this, _window, cx| {
+ this.update_in(cx, |this, window, cx| {
this.worktree_creation_status = None;
+
+ if let Some(thread_view) = this.active_thread_view(cx) {
+ thread_view.update(cx, |thread_view, cx| {
+ thread_view
+ .message_editor
+ .update(cx, |editor, cx| editor.clear(window, cx));
+ });
+ }
+
cx.notify();
})?;
@@ -3168,12 +3212,12 @@ impl AgentPanel {
ActiveView::AgentThread { conversation_view } => {
let server_view_ref = conversation_view.read(cx);
let is_generating_title = server_view_ref.as_native_thread(cx).is_some()
- && server_view_ref.parent_thread(cx).map_or(false, |tv| {
+ && server_view_ref.root_thread(cx).map_or(false, |tv| {
tv.read(cx).thread.read(cx).has_provisional_title()
});
if let Some(title_editor) = server_view_ref
- .parent_thread(cx)
+ .root_thread(cx)
.map(|r| r.read(cx).title_editor.clone())
{
if is_generating_title {
@@ -3650,6 +3694,7 @@ impl AgentPanel {
fn render_toolbar(&self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let agent_server_store = self.project.read(cx).agent_server_store().clone();
+ let has_visible_worktrees = self.project.read(cx).visible_worktrees(cx).next().is_some();
let focus_handle = self.focus_handle(cx);
let (selected_agent_custom_icon, selected_agent_label) =
@@ -3887,7 +3932,7 @@ impl AgentPanel {
};
let is_thread_loading = self
- .active_conversation()
+ .active_conversation_view()
.map(|thread| thread.read(cx).is_loading())
.unwrap_or(false);
@@ -4010,7 +4055,10 @@ impl AgentPanel {
.gap(DynamicSpacing::Base04.rems(cx))
.pl(DynamicSpacing::Base04.rems(cx))
.child(agent_selector_menu)
- .child(self.render_start_thread_in_selector(cx)),
+ .when(
+ has_visible_worktrees && self.project_has_git_repository(cx),
+ |this| this.child(self.render_start_thread_in_selector(cx)),
+ ),
)
.child(
h_flex()
@@ -4089,48 +4137,38 @@ impl AgentPanel {
match status {
WorktreeCreationStatus::Creating => Some(
h_flex()
+ .absolute()
+ .bottom_12()
.w_full()
- .px(DynamicSpacing::Base06.rems(cx))
- .py(DynamicSpacing::Base02.rems(cx))
- .gap_2()
- .bg(cx.theme().colors().surface_background)
- .border_b_1()
- .border_color(cx.theme().colors().border)
- .child(SpinnerLabel::new().size(LabelSize::Small))
+ .p_2()
+ .gap_1()
+ .justify_center()
+ .bg(cx.theme().colors().editor_background)
+ .child(
+ Icon::new(IconName::LoadCircle)
+ .size(IconSize::Small)
+ .color(Color::Muted)
+ .with_rotate_animation(3),
+ )
.child(
- Label::new("Creating worktree…")
+ Label::new("Creating Worktree…")
.color(Color::Muted)
.size(LabelSize::Small),
)
.into_any_element(),
),
WorktreeCreationStatus::Error(message) => Some(
- h_flex()
- .w_full()
- .px(DynamicSpacing::Base06.rems(cx))
- .py(DynamicSpacing::Base02.rems(cx))
- .gap_2()
- .bg(cx.theme().colors().surface_background)
- .border_b_1()
- .border_color(cx.theme().colors().border)
- .child(
- Icon::new(IconName::Warning)
- .size(IconSize::Small)
- .color(Color::Warning),
- )
- .child(
- Label::new(message.clone())
- .color(Color::Warning)
- .size(LabelSize::Small)
- .truncate(),
- )
+ Callout::new()
+ .icon(IconName::Warning)
+ .severity(Severity::Warning)
+ .title(message.clone())
.into_any_element(),
),
}
}
fn should_render_trial_end_upsell(&self, cx: &mut Context<Self>) -> bool {
- if TrialEndUpsell::dismissed() {
+ if TrialEndUpsell::dismissed(cx) {
return false;
}
@@ -4559,14 +4597,13 @@ impl Render for AgentPanel {
.on_action(cx.listener(Self::reset_font_size))
.on_action(cx.listener(Self::toggle_zoom))
.on_action(cx.listener(|this, _: &ReauthenticateAgent, window, cx| {
- if let Some(conversation_view) = this.active_conversation() {
+ if let Some(conversation_view) = this.active_conversation_view() {
conversation_view.update(cx, |conversation_view, cx| {
conversation_view.reauthenticate(window, cx)
})
}
}))
.child(self.render_toolbar(window, cx))
- .children(self.render_worktree_creation_status(cx))
.children(self.render_workspace_trust_message(cx))
.children(self.render_onboarding(window, cx))
.map(|parent| {
@@ -4623,6 +4660,7 @@ impl Render for AgentPanel {
ActiveView::Configuration => parent.children(self.configuration.clone()),
}
})
+ .children(self.render_worktree_creation_status(cx))
.children(self.render_trial_end_upsell(window, cx));
match self.active_view.which_font_size_used() {
@@ -4755,7 +4793,7 @@ impl AgentPanelDelegate for ConcreteAssistantPanelDelegate {
// Wait to create a new context until the workspace is no longer
// being updated.
cx.defer_in(window, move |panel, window, cx| {
- if let Some(conversation_view) = panel.active_conversation() {
+ if let Some(conversation_view) = panel.active_conversation_view() {
conversation_view.update(cx, |conversation_view, cx| {
conversation_view.insert_selections(window, cx);
});
@@ -4793,7 +4831,7 @@ impl AgentPanelDelegate for ConcreteAssistantPanelDelegate {
// Wait to create a new context until the workspace is no longer
// being updated.
cx.defer_in(window, move |panel, window, cx| {
- if let Some(conversation_view) = panel.active_conversation() {
+ if let Some(conversation_view) = panel.active_conversation_view() {
conversation_view.update(cx, |conversation_view, cx| {
conversation_view.insert_terminal_text(text, window, cx);
});
@@ -4859,7 +4897,7 @@ impl AgentPanel {
/// This is a test-only accessor that exposes the private `active_thread_view()`
/// method for test assertions. Not compiled into production builds.
pub fn active_thread_view_for_tests(&self) -> Option<&Entity<ConversationView>> {
- self.active_conversation()
+ self.active_conversation_view()
}
/// Sets the start_thread_in value directly, bypassing validation.
@@ -4922,7 +4960,10 @@ impl AgentPanel {
mod tests {
use super::*;
use crate::conversation_view::tests::{StubAgentServer, init_test};
- use crate::test_support::{active_session_id, open_thread_with_connection, send_message};
+ use crate::test_support::{
+ active_session_id, open_thread_with_connection, open_thread_with_custom_connection,
+ send_message,
+ };
use acp_thread::{StubAgentConnection, ThreadStatus};
use agent_servers::CODEX_ID;
use assistant_text_thread::TextThreadStore;
@@ -4931,6 +4972,7 @@ mod tests {
use gpui::{TestAppContext, VisualTestContext};
use project::Project;
use serde_json::json;
+ use std::time::Instant;
use workspace::MultiWorkspace;
#[gpui::test]
@@ -5045,7 +5087,7 @@ mod tests {
"workspace A agent type should be restored"
);
assert!(
- panel.active_conversation().is_some(),
+ panel.active_conversation_view().is_some(),
"workspace A should have its active thread restored"
);
});
@@ -5065,7 +5107,7 @@ mod tests {
"workspace B agent type should be restored"
);
assert!(
- panel.active_conversation().is_none(),
+ panel.active_conversation_view().is_none(),
"workspace B should have no active thread"
);
});
@@ -5398,6 +5440,41 @@ mod tests {
assert!(uri.contains("utils.rs"), "URI should encode the file path");
}
+ fn open_generating_thread_with_loadable_connection(
+ panel: &Entity<AgentPanel>,
+ connection: &StubAgentConnection,
+ cx: &mut VisualTestContext,
+ ) -> acp::SessionId {
+ open_thread_with_custom_connection(panel, connection.clone(), cx);
+ let session_id = active_session_id(panel, cx);
+ send_message(panel, cx);
+ cx.update(|_, cx| {
+ connection.send_update(
+ session_id.clone(),
+ acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new("done".into())),
+ cx,
+ );
+ });
+ cx.run_until_parked();
+ session_id
+ }
+
+ fn open_idle_thread_with_non_loadable_connection(
+ panel: &Entity<AgentPanel>,
+ connection: &StubAgentConnection,
+ cx: &mut VisualTestContext,
+ ) -> acp::SessionId {
+ open_thread_with_custom_connection(panel, connection.clone(), cx);
+ let session_id = active_session_id(panel, cx);
+
+ connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk(
+ acp::ContentChunk::new("done".into()),
+ )]);
+ send_message(panel, cx);
+
+ session_id
+ }
+
async fn setup_panel(cx: &mut TestAppContext) -> (Entity<AgentPanel>, VisualTestContext) {
init_test(cx);
cx.update(|cx| {
@@ -5471,7 +5548,7 @@ mod tests {
}
#[gpui::test]
- async fn test_idle_thread_dropped_when_navigating_away(cx: &mut TestAppContext) {
+ async fn test_idle_non_loadable_thread_retained_when_navigating_away(cx: &mut TestAppContext) {
let (panel, mut cx) = setup_panel(cx).await;
let connection_a = StubAgentConnection::new();
@@ -5482,8 +5559,9 @@ mod tests {
send_message(&panel, &mut cx);
let weak_view_a = panel.read_with(&cx, |panel, _cx| {
- panel.active_conversation().unwrap().downgrade()
+ panel.active_conversation_view().unwrap().downgrade()
});
+ let session_id_a = active_session_id(&panel, &cx);
// Thread A should be idle (auto-completed via set_next_prompt_updates).
panel.read_with(&cx, |panel, cx| {
@@ -5491,21 +5569,25 @@ mod tests {
assert_eq!(thread.read(cx).status(), ThreadStatus::Idle);
});
- // Open a new thread B — thread A should NOT be retained.
+ // Open a new thread B — thread A should be retained because it is not loadable.
let connection_b = StubAgentConnection::new();
open_thread_with_connection(&panel, connection_b, &mut cx);
panel.read_with(&cx, |panel, _cx| {
+ assert_eq!(
+ panel.background_threads.len(),
+ 1,
+                "Idle non-loadable thread A should be retained in background_threads"
+ );
assert!(
- panel.background_threads.is_empty(),
- "Idle thread A should not be retained in background_views"
+ panel.background_threads.contains_key(&session_id_a),
+                "Background thread entry should be keyed by thread A's session ID"
);
});
- // Verify the old ConnectionView entity was dropped (no strong references remain).
assert!(
- weak_view_a.upgrade().is_none(),
- "Idle ConnectionView should have been dropped"
+ weak_view_a.upgrade().is_some(),
+            "Idle non-loadable ConversationView should still be retained"
);
}
@@ -5566,8 +5648,152 @@ mod tests {
"Promoted thread A should no longer be in background_views"
);
assert!(
- !panel.background_threads.contains_key(&session_id_b),
- "Thread B (idle) should not have been retained in background_views"
+ panel.background_threads.contains_key(&session_id_b),
+            "Thread B (idle, non-loadable) should remain retained in background_threads"
+ );
+ });
+ }
+
+ #[gpui::test]
+ async fn test_cleanup_background_threads_keeps_five_most_recent_idle_loadable_threads(
+ cx: &mut TestAppContext,
+ ) {
+ let (panel, mut cx) = setup_panel(cx).await;
+ let connection = StubAgentConnection::new()
+ .with_supports_load_session(true)
+ .with_agent_id("loadable-stub".into())
+ .with_telemetry_id("loadable-stub".into());
+ let mut session_ids = Vec::new();
+
+ for _ in 0..7 {
+ session_ids.push(open_generating_thread_with_loadable_connection(
+ &panel,
+ &connection,
+ &mut cx,
+ ));
+ }
+
+ let base_time = Instant::now();
+
+ for session_id in session_ids.iter().take(6) {
+ connection.end_turn(session_id.clone(), acp::StopReason::EndTurn);
+ }
+ cx.run_until_parked();
+
+ panel.update(&mut cx, |panel, cx| {
+ for (index, session_id) in session_ids.iter().take(6).enumerate() {
+ let conversation_view = panel
+ .background_threads
+ .get(session_id)
+ .expect("background thread should exist")
+ .clone();
+ conversation_view.update(cx, |view, cx| {
+ view.set_updated_at(base_time + Duration::from_secs(index as u64), cx);
+ });
+ }
+ panel.cleanup_background_threads(cx);
+ });
+
+ panel.read_with(&cx, |panel, _cx| {
+ assert_eq!(
+ panel.background_threads.len(),
+ 5,
+ "cleanup should keep at most five idle loadable background threads"
+ );
+ assert!(
+ !panel.background_threads.contains_key(&session_ids[0]),
+ "oldest idle loadable background thread should be removed"
+ );
+ for session_id in &session_ids[1..6] {
+ assert!(
+ panel.background_threads.contains_key(session_id),
+ "more recent idle loadable background threads should be retained"
+ );
+ }
+ assert!(
+ !panel.background_threads.contains_key(&session_ids[6]),
+ "the active thread should not also be stored as a background thread"
+ );
+ });
+ }
+
+ #[gpui::test]
+ async fn test_cleanup_background_threads_preserves_idle_non_loadable_threads(
+ cx: &mut TestAppContext,
+ ) {
+ let (panel, mut cx) = setup_panel(cx).await;
+
+ let non_loadable_connection = StubAgentConnection::new();
+ let non_loadable_session_id = open_idle_thread_with_non_loadable_connection(
+ &panel,
+ &non_loadable_connection,
+ &mut cx,
+ );
+
+ let loadable_connection = StubAgentConnection::new()
+ .with_supports_load_session(true)
+ .with_agent_id("loadable-stub".into())
+ .with_telemetry_id("loadable-stub".into());
+ let mut loadable_session_ids = Vec::new();
+
+ for _ in 0..7 {
+ loadable_session_ids.push(open_generating_thread_with_loadable_connection(
+ &panel,
+ &loadable_connection,
+ &mut cx,
+ ));
+ }
+
+ let base_time = Instant::now();
+
+ for session_id in loadable_session_ids.iter().take(6) {
+ loadable_connection.end_turn(session_id.clone(), acp::StopReason::EndTurn);
+ }
+ cx.run_until_parked();
+
+ panel.update(&mut cx, |panel, cx| {
+ for (index, session_id) in loadable_session_ids.iter().take(6).enumerate() {
+ let conversation_view = panel
+ .background_threads
+ .get(session_id)
+ .expect("background thread should exist")
+ .clone();
+ conversation_view.update(cx, |view, cx| {
+ view.set_updated_at(base_time + Duration::from_secs(index as u64), cx);
+ });
+ }
+ panel.cleanup_background_threads(cx);
+ });
+
+ panel.read_with(&cx, |panel, _cx| {
+ assert_eq!(
+ panel.background_threads.len(),
+ 6,
+ "cleanup should keep the non-loadable idle thread in addition to five loadable ones"
+ );
+ assert!(
+ panel
+ .background_threads
+ .contains_key(&non_loadable_session_id),
+ "idle non-loadable background threads should not be cleanup candidates"
+ );
+ assert!(
+ !panel
+ .background_threads
+ .contains_key(&loadable_session_ids[0]),
+ "oldest idle loadable background thread should still be removed"
+ );
+ for session_id in &loadable_session_ids[1..6] {
+ assert!(
+ panel.background_threads.contains_key(session_id),
+ "more recent idle loadable background threads should be retained"
+ );
+ }
+ assert!(
+ !panel
+ .background_threads
+ .contains_key(&loadable_session_ids[6]),
+ "the active loadable thread should not also be stored as a background thread"
);
});
}
@@ -5740,8 +5966,8 @@ mod tests {
});
// Change thread target to NewWorktree.
- panel.update(cx, |panel, cx| {
- panel.set_start_thread_in(&StartThreadIn::NewWorktree, cx);
+ panel.update_in(cx, |panel, window, cx| {
+ panel.set_start_thread_in(&StartThreadIn::NewWorktree, window, cx);
});
panel.read_with(cx, |panel, _cx| {
@@ -5963,11 +6189,11 @@ mod tests {
// Set the selected agent to Codex (a custom agent) and start_thread_in
// to NewWorktree. We do this AFTER opening the thread because
// open_external_thread_with_server overrides selected_agent_type.
- panel.update(cx, |panel, cx| {
+ panel.update_in(cx, |panel, window, cx| {
panel.selected_agent_type = AgentType::Custom {
id: CODEX_ID.into(),
};
- panel.set_start_thread_in(&StartThreadIn::NewWorktree, cx);
+ panel.set_start_thread_in(&StartThreadIn::NewWorktree, window, cx);
});
// Verify the panel has the Codex agent selected.
@@ -403,6 +403,22 @@ impl AgentRegistryPage {
})
});
+ let website_button = agent.website().map(|website| {
+ let website = website.clone();
+ let website_for_click = website.clone();
+ IconButton::new(
+ SharedString::from(format!("agent-website-{}", agent.id())),
+ IconName::Link,
+ )
+ .icon_size(IconSize::Small)
+ .tooltip(move |_, cx| {
+ Tooltip::with_meta("Visit Agent Website", None, website.clone(), cx)
+ })
+ .on_click(move |_, _, cx| {
+ cx.open_url(&website_for_click);
+ })
+ });
+
AgentRegistryCard::new()
.child(
h_flex()
@@ -441,7 +457,8 @@ impl AgentRegistryPage {
.color(Color::Muted)
.truncate(),
)
- .when_some(repository_button, |this, button| this.child(button)),
+ .when_some(repository_button, |this, button| this.child(button))
+ .when_some(website_button, |this, button| this.child(button)),
),
)
}
@@ -194,6 +194,29 @@ pub struct AuthorizeToolCall {
pub option_kind: String,
}
+/// Action to select a permission granularity option from the dropdown.
+/// This updates the selected granularity without triggering authorization.
+#[derive(Clone, PartialEq, Deserialize, JsonSchema, Action)]
+#[action(namespace = agent)]
+#[serde(deny_unknown_fields)]
+pub struct SelectPermissionGranularity {
+ /// The tool call ID for which to select the granularity.
+ pub tool_call_id: String,
+ /// The index of the selected granularity option.
+ pub index: usize,
+}
+
+/// Action to toggle a command pattern checkbox in the permission dropdown.
+#[derive(Clone, PartialEq, Deserialize, JsonSchema, Action)]
+#[action(namespace = agent)]
+#[serde(deny_unknown_fields)]
+pub struct ToggleCommandPattern {
+ /// The tool call ID for which to toggle the pattern.
+ pub tool_call_id: String,
+ /// The index of the command pattern to toggle.
+ pub pattern_index: usize,
+}
+
/// Creates a new conversation thread, optionally based on an existing thread.
#[derive(Default, Clone, PartialEq, Deserialize, JsonSchema, Action)]
#[action(namespace = agent)]
@@ -228,6 +251,13 @@ pub enum Agent {
}
impl Agent {
+ pub fn id(&self) -> AgentId {
+ match self {
+ Self::NativeAgent => agent::ZED_AGENT_ID.clone(),
+ Self::Custom { id } => id.clone(),
+ }
+ }
+
pub fn server(
&self,
fs: Arc<dyn fs::Fs>,
@@ -1691,27 +1691,34 @@ impl MentionCompletion {
offset_to_line: usize,
supported_modes: &[PromptContextType],
) -> Option<Self> {
- let last_mention_start = line.rfind('@')?;
+ // Find the rightmost '@' that has a word boundary before it and no whitespace immediately after
+ let mut last_mention_start = None;
+ for (idx, _) in line.rmatch_indices('@') {
+ // No whitespace immediately after '@'
+ if line[idx + 1..]
+ .chars()
+ .next()
+ .is_some_and(|c| c.is_whitespace())
+ {
+ continue;
+ }
- // No whitespace immediately after '@'
- if line[last_mention_start + 1..]
- .chars()
- .next()
- .is_some_and(|c| c.is_whitespace())
- {
- return None;
- }
+ // Must be a word boundary before '@'
+ if idx > 0
+ && line[..idx]
+ .chars()
+ .last()
+ .is_some_and(|c| !c.is_whitespace())
+ {
+ continue;
+ }
- // Must be a word boundary before '@'
- if last_mention_start > 0
- && line[..last_mention_start]
- .chars()
- .last()
- .is_some_and(|c| !c.is_whitespace())
- {
- return None;
+ last_mention_start = Some(idx);
+ break;
}
+ let last_mention_start = last_mention_start?;
+
let rest_of_line = &line[last_mention_start + 1..];
let mut mode = None;
@@ -2488,6 +2495,48 @@ mod tests {
None,
"Should not parse with a space after @ at the start of the line"
);
+
+ assert_eq!(
+ MentionCompletion::try_parse(
+ "@fetch https://www.npmjs.com/package/@matterport/sdk",
+ 0,
+ &[PromptContextType::Fetch]
+ ),
+ Some(MentionCompletion {
+ source_range: 0..52,
+ mode: Some(PromptContextType::Fetch),
+ argument: Some("https://www.npmjs.com/package/@matterport/sdk".to_string()),
+ }),
+ "Should handle URLs with @ in the path"
+ );
+
+ assert_eq!(
+ MentionCompletion::try_parse(
+ "@fetch https://example.com/@org/@repo/file",
+ 0,
+ &[PromptContextType::Fetch]
+ ),
+ Some(MentionCompletion {
+ source_range: 0..42,
+ mode: Some(PromptContextType::Fetch),
+ argument: Some("https://example.com/@org/@repo/file".to_string()),
+ }),
+ "Should handle URLs with multiple @ characters"
+ );
+
+ assert_eq!(
+ MentionCompletion::try_parse(
+ "@fetch https://example.com/@",
+ 0,
+ &[PromptContextType::Fetch]
+ ),
+ Some(MentionCompletion {
+ source_range: 0..28,
+ mode: Some(PromptContextType::Fetch),
+ argument: Some("https://example.com/@".to_string()),
+ }),
+ "Should parse URL ending with @ (even if URL is incomplete)"
+ );
}
#[gpui::test]
@@ -1,19 +1,19 @@
use acp_thread::{
AcpThread, AcpThreadEvent, AgentSessionInfo, AgentThreadEntry, AssistantMessage,
AssistantMessageChunk, AuthRequired, LoadError, MentionUri, PermissionOptionChoice,
- PermissionOptions, RetryStatus, ThreadStatus, ToolCall, ToolCallContent, ToolCallStatus,
+ PermissionOptions, PermissionPattern, RetryStatus, SelectedPermissionOutcome,
+ SelectedPermissionParams, ThreadStatus, ToolCall, ToolCallContent, ToolCallStatus,
UserMessageId,
};
use acp_thread::{AgentConnection, Plan};
use action_log::{ActionLog, ActionLogTelemetry, DiffStats};
use agent::{NativeAgentServer, NativeAgentSessionList, SharedThread, ThreadStore};
-use agent_client_protocol::{self as acp, PromptCapabilities};
-use agent_servers::AgentServer;
+use agent_client_protocol as acp;
#[cfg(test)]
use agent_servers::AgentServerDelegate;
+use agent_servers::{AgentServer, GEMINI_TERMINAL_AUTH_METHOD_ID};
use agent_settings::{AgentProfileId, AgentSettings};
use anyhow::{Result, anyhow};
-use arrayvec::ArrayVec;
use audio::{Audio, Sound};
use buffer_diff::BufferDiff;
use client::zed_urls;
@@ -36,11 +36,13 @@ use gpui::{
use language::Buffer;
use language_model::LanguageModelRegistry;
use markdown::{Markdown, MarkdownElement, MarkdownFont, MarkdownStyle};
+use parking_lot::RwLock;
use project::{AgentId, AgentServerStore, Project, ProjectEntryId};
use prompt_store::{PromptId, PromptStore};
+
+use crate::message_editor::SessionCapabilities;
use rope::Point;
use settings::{NotifyWhenAgentWaiting, Settings as _, SettingsStore};
-use std::cell::RefCell;
use std::path::Path;
use std::sync::Arc;
use std::time::Instant;
@@ -75,7 +77,7 @@ use crate::agent_diff::AgentDiff;
use crate::entry_view_state::{EntryViewEvent, ViewEvent};
use crate::message_editor::{MessageEditor, MessageEditorEvent};
use crate::profile_selector::{ProfileProvider, ProfileSelector};
-use crate::thread_metadata_store::ThreadMetadataStore;
+use crate::thread_metadata_store::SidebarThreadMetadataStore;
use crate::ui::{AgentNotification, AgentNotificationEvent};
use crate::{
Agent, AgentDiffPane, AgentInitialContent, AgentPanel, AllowAlways, AllowOnce,
@@ -162,73 +164,51 @@ pub(crate) struct Conversation {
threads: HashMap<acp::SessionId, Entity<AcpThread>>,
permission_requests: IndexMap<acp::SessionId, Vec<acp::ToolCallId>>,
subscriptions: Vec<Subscription>,
- /// Tracks the selected granularity index for each tool call's permission dropdown.
- /// The index corresponds to the position in the allow_options list.
- selected_permission_granularity: HashMap<acp::SessionId, HashMap<acp::ToolCallId, usize>>,
+ updated_at: Option<Instant>,
}
impl Conversation {
pub fn register_thread(&mut self, thread: Entity<AcpThread>, cx: &mut Context<Self>) {
let session_id = thread.read(cx).session_id().clone();
- let subscription = cx.subscribe(&thread, move |this, _thread, event, _cx| match event {
- AcpThreadEvent::ToolAuthorizationRequested(id) => {
- this.permission_requests
- .entry(session_id.clone())
- .or_default()
- .push(id.clone());
- }
- AcpThreadEvent::ToolAuthorizationReceived(id) => {
- if let Some(tool_calls) = this.permission_requests.get_mut(&session_id) {
- tool_calls.retain(|tool_call_id| tool_call_id != id);
- if tool_calls.is_empty() {
- this.permission_requests.shift_remove(&session_id);
+ let subscription = cx.subscribe(&thread, move |this, _thread, event, _cx| {
+ this.updated_at = Some(Instant::now());
+ match event {
+ AcpThreadEvent::ToolAuthorizationRequested(id) => {
+ this.permission_requests
+ .entry(session_id.clone())
+ .or_default()
+ .push(id.clone());
+ }
+ AcpThreadEvent::ToolAuthorizationReceived(id) => {
+ if let Some(tool_calls) = this.permission_requests.get_mut(&session_id) {
+ tool_calls.retain(|tool_call_id| tool_call_id != id);
+ if tool_calls.is_empty() {
+ this.permission_requests.shift_remove(&session_id);
+ }
}
}
+ AcpThreadEvent::NewEntry
+ | AcpThreadEvent::TitleUpdated
+ | AcpThreadEvent::TokenUsageUpdated
+ | AcpThreadEvent::EntryUpdated(_)
+ | AcpThreadEvent::EntriesRemoved(_)
+ | AcpThreadEvent::Retry(_)
+ | AcpThreadEvent::SubagentSpawned(_)
+ | AcpThreadEvent::Stopped(_)
+ | AcpThreadEvent::Error
+ | AcpThreadEvent::LoadError(_)
+ | AcpThreadEvent::PromptCapabilitiesUpdated
+ | AcpThreadEvent::Refusal
+ | AcpThreadEvent::AvailableCommandsUpdated(_)
+ | AcpThreadEvent::ModeUpdated(_)
+ | AcpThreadEvent::ConfigOptionsUpdated(_) => {}
}
- AcpThreadEvent::NewEntry
- | AcpThreadEvent::TitleUpdated
- | AcpThreadEvent::TokenUsageUpdated
- | AcpThreadEvent::EntryUpdated(_)
- | AcpThreadEvent::EntriesRemoved(_)
- | AcpThreadEvent::Retry(_)
- | AcpThreadEvent::SubagentSpawned(_)
- | AcpThreadEvent::Stopped(_)
- | AcpThreadEvent::Error
- | AcpThreadEvent::LoadError(_)
- | AcpThreadEvent::PromptCapabilitiesUpdated
- | AcpThreadEvent::Refusal
- | AcpThreadEvent::AvailableCommandsUpdated(_)
- | AcpThreadEvent::ModeUpdated(_)
- | AcpThreadEvent::ConfigOptionsUpdated(_) => {}
});
self.subscriptions.push(subscription);
self.threads
.insert(thread.read(cx).session_id().clone(), thread);
}
- pub fn selected_permission_granularity(
- &self,
- session_id: &acp::SessionId,
- tool_call_id: &acp::ToolCallId,
- ) -> Option<usize> {
- self.selected_permission_granularity
- .get(session_id)
- .and_then(|map| map.get(tool_call_id))
- .copied()
- }
-
- pub fn set_selected_permission_granularity(
- &mut self,
- session_id: acp::SessionId,
- tool_call_id: acp::ToolCallId,
- granularity: usize,
- ) {
- self.selected_permission_granularity
- .entry(session_id)
- .or_default()
- .insert(tool_call_id, granularity);
- }
-
pub fn pending_tool_call<'a>(
&'a self,
session_id: &acp::SessionId,
@@ -268,7 +248,7 @@ impl Conversation {
self.authorize_tool_call(
session_id.clone(),
tool_call_id,
- option.option_id.clone(),
+ option.option_id.clone().into(),
option.kind,
cx,
);
@@ -279,7 +259,7 @@ impl Conversation {
&mut self,
session_id: acp::SessionId,
tool_call_id: acp::ToolCallId,
- option_id: acp::PermissionOptionId,
+ outcome: SelectedPermissionOutcome,
option_kind: acp::PermissionOptionKind,
cx: &mut Context<Self>,
) {
@@ -296,7 +276,7 @@ impl Conversation {
);
thread.update(cx, |thread, cx| {
- thread.authorize_tool_call(tool_call_id, option_id, option_kind, cx);
+ thread.authorize_tool_call(tool_call_id, outcome, option_kind, cx);
});
cx.notify();
}
@@ -350,7 +330,7 @@ impl ConversationView {
.pending_tool_call(id, cx)
}
- pub fn parent_thread(&self, cx: &App) -> Option<Entity<ThreadView>> {
+ pub fn root_thread(&self, cx: &App) -> Option<Entity<ThreadView>> {
match &self.server_state {
ServerState::Connected(connected) => {
let mut current = connected.active_view()?;
@@ -386,6 +366,11 @@ impl ConversationView {
}
}
+ pub fn updated_at(&self, cx: &App) -> Option<Instant> {
+ self.as_connected()
+ .and_then(|connected| connected.conversation.read(cx).updated_at)
+ }
+
pub fn navigate_to_session(
&mut self,
session_id: acp::SessionId,
@@ -588,11 +573,7 @@ impl ConversationView {
if let Some(view) = self.active_thread() {
view.update(cx, |this, cx| {
this.message_editor.update(cx, |editor, cx| {
- editor.set_command_state(
- this.prompt_capabilities.clone(),
- this.available_commands.clone(),
- cx,
- );
+ editor.set_session_capabilities(this.session_capabilities.clone(), cx);
});
});
}
@@ -621,32 +602,7 @@ impl ConversationView {
session_id: resume_session_id.clone(),
};
}
- let mut worktrees = project.read(cx).visible_worktrees(cx).collect::<Vec<_>>();
- // Pick the first non-single-file worktree for the root directory if there are any,
- // and otherwise the parent of a single-file worktree, falling back to $HOME if there are no visible worktrees.
- worktrees.sort_by(|l, r| {
- l.read(cx)
- .is_single_file()
- .cmp(&r.read(cx).is_single_file())
- });
- let worktree_roots: Vec<Arc<Path>> = worktrees
- .iter()
- .filter_map(|worktree| {
- let worktree = worktree.read(cx);
- if worktree.is_single_file() {
- Some(worktree.abs_path().parent()?.into())
- } else {
- Some(worktree.abs_path())
- }
- })
- .collect();
- let session_work_dirs = work_dirs.unwrap_or_else(|| {
- if worktree_roots.is_empty() {
- PathList::new(&[paths::home_dir().as_path()])
- } else {
- PathList::new(&worktree_roots)
- }
- });
+ let session_work_dirs = work_dirs.unwrap_or_else(|| project.read(cx).default_path_list(cx));
let connection_entry = connection_store.update(cx, |store, cx| {
store.request_connection(connection_key, agent.clone(), cx)
@@ -821,13 +777,13 @@ impl ConversationView {
cx: &mut Context<Self>,
) -> Entity<ThreadView> {
let agent_id = self.agent.agent_id();
- let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default()));
- let available_commands = Rc::new(RefCell::new(vec![]));
+ let session_capabilities = Arc::new(RwLock::new(SessionCapabilities::new(
+ thread.read(cx).prompt_capabilities(),
+ vec![],
+ )));
let action_log = thread.read(cx).action_log().clone();
- prompt_capabilities.replace(thread.read(cx).prompt_capabilities());
-
let entry_view_state = cx.new(|_| {
EntryViewState::new(
self.workspace.clone(),
@@ -835,8 +791,7 @@ impl ConversationView {
self.thread_store.clone(),
history.as_ref().map(|h| h.downgrade()),
self.prompt_store.clone(),
- prompt_capabilities.clone(),
- available_commands.clone(),
+ session_capabilities.clone(),
self.agent.agent_id(),
)
});
@@ -995,8 +950,7 @@ impl ConversationView {
model_selector,
profile_selector,
list_state,
- prompt_capabilities,
- available_commands,
+ session_capabilities,
resumed_without_history,
self.project.downgrade(),
self.thread_store.clone(),
@@ -1181,7 +1135,7 @@ impl ConversationView {
pub fn parent_id(&self, cx: &App) -> Option<acp::SessionId> {
match &self.server_state {
ServerState::Connected(_) => self
- .parent_thread(cx)
+ .root_thread(cx)
.map(|thread| thread.read(cx).id.clone()),
ServerState::Loading(loading) => loading.read(cx).session_id.clone(),
ServerState::LoadError { session_id, .. } => session_id.clone(),
@@ -1411,8 +1365,9 @@ impl ConversationView {
if let Some(active) = self.thread_view(&thread_id) {
active.update(cx, |active, _cx| {
active
- .prompt_capabilities
- .replace(thread.read(_cx).prompt_capabilities());
+ .session_capabilities
+ .write()
+ .set_prompt_capabilities(thread.read(_cx).prompt_capabilities());
});
}
}
@@ -1437,7 +1392,10 @@ impl ConversationView {
let has_commands = !available_commands.is_empty();
if let Some(active) = self.active_thread() {
active.update(cx, |active, _cx| {
- active.available_commands.replace(available_commands);
+ active
+ .session_capabilities
+ .write()
+ .set_available_commands(available_commands);
});
}
@@ -1475,6 +1433,9 @@ impl ConversationView {
window: &mut Window,
cx: &mut Context<Self>,
) {
+ let Some(workspace) = self.workspace.upgrade() else {
+ return;
+ };
let Some(connected) = self.as_connected_mut() else {
return;
};
@@ -1491,119 +1452,65 @@ impl ConversationView {
let agent_telemetry_id = connection.telemetry_id();
- // Check for the experimental "terminal-auth" _meta field
- let auth_method = connection.auth_methods().iter().find(|m| m.id() == &method);
+ if let Some(login) = connection.terminal_auth_task(&method, cx) {
+ configuration_view.take();
+ pending_auth_method.replace(method.clone());
- if let Some(terminal_auth) = auth_method
- .and_then(|a| match a {
- acp::AuthMethod::EnvVar(env_var) => env_var.meta.as_ref(),
- acp::AuthMethod::Terminal(terminal) => terminal.meta.as_ref(),
- acp::AuthMethod::Agent(agent) => agent.meta.as_ref(),
- _ => None,
- })
- .and_then(|m| m.get("terminal-auth"))
- {
- // Extract terminal auth details from meta
- if let (Some(command), Some(label)) = (
- terminal_auth.get("command").and_then(|v| v.as_str()),
- terminal_auth.get("label").and_then(|v| v.as_str()),
- ) {
- let args = terminal_auth
- .get("args")
- .and_then(|v| v.as_array())
- .map(|arr| {
- arr.iter()
- .filter_map(|v| v.as_str().map(String::from))
- .collect()
- })
- .unwrap_or_default();
-
- let env = terminal_auth
- .get("env")
- .and_then(|v| v.as_object())
- .map(|obj| {
- obj.iter()
- .filter_map(|(k, v)| v.as_str().map(|val| (k.clone(), val.to_string())))
- .collect::<HashMap<String, String>>()
- })
- .unwrap_or_default();
-
- // Build SpawnInTerminal from _meta
- let login = task::SpawnInTerminal {
- id: task::TaskId(format!("external-agent-{}-login", label)),
- full_label: label.to_string(),
- label: label.to_string(),
- command: Some(command.to_string()),
- args,
- command_label: label.to_string(),
- env,
- use_new_terminal: true,
- allow_concurrent_runs: true,
- hide: task::HideStrategy::Always,
- ..Default::default()
- };
-
- configuration_view.take();
- pending_auth_method.replace(method.clone());
-
- if let Some(workspace) = self.workspace.upgrade() {
- let project = self.project.clone();
- let authenticate = Self::spawn_external_agent_login(
- login,
- workspace,
- project,
- method.clone(),
- false,
- window,
- cx,
- );
- cx.notify();
- self.auth_task = Some(cx.spawn_in(window, {
- async move |this, cx| {
- let result = authenticate.await;
-
- match &result {
- Ok(_) => telemetry::event!(
- "Authenticate Agent Succeeded",
- agent = agent_telemetry_id
- ),
- Err(_) => {
- telemetry::event!(
- "Authenticate Agent Failed",
- agent = agent_telemetry_id,
- )
- }
- }
+ let project = self.project.clone();
+ let authenticate = Self::spawn_external_agent_login(
+ login,
+ workspace,
+ project,
+ method.clone(),
+ false,
+ window,
+ cx,
+ );
+ cx.notify();
+ self.auth_task = Some(cx.spawn_in(window, {
+ async move |this, cx| {
+ let result = authenticate.await;
+
+ match &result {
+ Ok(_) => telemetry::event!(
+ "Authenticate Agent Succeeded",
+ agent = agent_telemetry_id
+ ),
+ Err(_) => {
+ telemetry::event!(
+ "Authenticate Agent Failed",
+ agent = agent_telemetry_id,
+ )
+ }
+ }
- this.update_in(cx, |this, window, cx| {
- if let Err(err) = result {
- if let Some(ConnectedServerState {
- auth_state:
- AuthState::Unauthenticated {
- pending_auth_method,
- ..
- },
+ this.update_in(cx, |this, window, cx| {
+ if let Err(err) = result {
+ if let Some(ConnectedServerState {
+ auth_state:
+ AuthState::Unauthenticated {
+ pending_auth_method,
..
- }) = this.as_connected_mut()
- {
- pending_auth_method.take();
- }
- if let Some(active) = this.active_thread() {
- active.update(cx, |active, cx| {
- active.handle_thread_error(err, cx);
- })
- }
- } else {
- this.reset(window, cx);
- }
- this.auth_task.take()
- })
- .ok();
+ },
+ ..
+ }) = this.as_connected_mut()
+ {
+ pending_auth_method.take();
+ }
+ if let Some(active) = this.active_thread() {
+ active.update(cx, |active, cx| {
+ active.handle_thread_error(err, cx);
+ })
+ }
+ } else {
+ this.reset(window, cx);
}
- }));
+ this.auth_task.take()
+ })
+ .ok();
}
- return;
- }
+ }));
+ return;
}
configuration_view.take();
@@ -1675,7 +1582,7 @@ impl ConversationView {
.read(cx)
.work_dirs()
.cloned()
- .unwrap_or_else(|| PathList::new(&[paths::home_dir().as_path()]));
+ .unwrap_or_else(|| self.project.read(cx).default_path_list(cx));
let subagent_thread_task = connected.connection.clone().load_session(
subagent_id.clone(),
@@ -1726,7 +1633,7 @@ impl ConversationView {
cx: &mut App,
) -> Task<Result<()>> {
let Some(terminal_panel) = workspace.read(cx).panel::<TerminalPanel>(cx) else {
- return Task::ready(Ok(()));
+ return Task::ready(Err(anyhow!("Terminal panel is unavailable")));
};
window.spawn(cx, async move |cx| {
@@ -1734,17 +1641,14 @@ impl ConversationView {
if let Some(cmd) = &task.command {
// Have "node" command use Zed's managed Node runtime by default
if cmd == "node" {
- let resolved_node_runtime = project
- .update(cx, |project, cx| {
- let agent_server_store = project.agent_server_store().clone();
- agent_server_store.update(cx, |store, cx| {
- store.node_runtime().map(|node_runtime| {
- cx.background_spawn(async move {
- node_runtime.binary_path().await
- })
- })
+ let resolved_node_runtime = project.update(cx, |project, cx| {
+ let agent_server_store = project.agent_server_store().clone();
+ agent_server_store.update(cx, |store, cx| {
+ store.node_runtime().map(|node_runtime| {
+ cx.background_spawn(async move { node_runtime.binary_path().await })
})
- });
+ })
+ });
if let Some(resolve_task) = resolved_node_runtime {
if let Ok(node_path) = resolve_task.await {
@@ -1756,14 +1660,8 @@ impl ConversationView {
task.shell = task::Shell::WithArguments {
program: task.command.take().expect("login command should be set"),
args: std::mem::take(&mut task.args),
- title_override: None
+ title_override: None,
};
- task.full_label = task.label.clone();
- task.id = task::TaskId(format!("external-agent-{}-login", task.label));
- task.command_label = task.label.clone();
- task.use_new_terminal = true;
- task.allow_concurrent_runs = true;
- task.hide = task::HideStrategy::Always;
let terminal = terminal_panel
.update_in(cx, |terminal_panel, window, cx| {
@@ -1772,7 +1670,7 @@ impl ConversationView {
.await?;
let success_patterns = match method.0.as_ref() {
- "claude-login" | "spawn-gemini-cli" => vec![
+ "claude-login" | GEMINI_TERMINAL_AUTH_METHOD_ID => vec![
"Login successful".to_string(),
"Type your message".to_string(),
],
@@ -1806,7 +1704,9 @@ impl ConversationView {
cx.background_executor().timer(Duration::from_secs(1)).await;
let content =
terminal.update(cx, |terminal, _cx| terminal.get_content())?;
- if success_patterns.iter().any(|pattern| content.contains(pattern))
+ if success_patterns
+ .iter()
+ .any(|pattern| content.contains(pattern))
{
return anyhow::Ok(());
}
@@ -1823,8 +1723,23 @@ impl ConversationView {
}
}
_ = exit_status => {
- if !previous_attempt && project.read_with(cx, |project, _| project.is_via_remote_server()) && login.label.contains("gemini") {
- return cx.update(|window, cx| Self::spawn_external_agent_login(login, workspace, project.clone(), method, true, window, cx))?.await
+ if !previous_attempt
+ && project.read_with(cx, |project, _| project.is_via_remote_server())
+ && method.0.as_ref() == GEMINI_TERMINAL_AUTH_METHOD_ID
+ {
+ return cx
+ .update(|window, cx| {
+ Self::spawn_external_agent_login(
+ login,
+ workspace,
+ project.clone(),
+ method,
+ true,
+ window,
+ cx,
+ )
+ })?
+ .await;
}
return Err(anyhow!("exited before logging in"));
}
@@ -2217,8 +2132,7 @@ impl ConversationView {
let Some(thread) = connected.active_view() else {
return;
};
- let prompt_capabilities = thread.read(cx).prompt_capabilities.clone();
- let available_commands = thread.read(cx).available_commands.clone();
+ let session_capabilities = thread.read(cx).session_capabilities.clone();
let current_count = thread.read(cx).queued_message_editors.len();
let last_synced = thread.read(cx).last_synced_queue_length;
@@ -2257,8 +2171,7 @@ impl ConversationView {
None,
history.clone(),
None,
- prompt_capabilities.clone(),
- available_commands.clone(),
+ session_capabilities.clone(),
agent_name.clone(),
"",
EditorMode::AutoHeight {
@@ -2615,7 +2528,7 @@ impl ConversationView {
let task = history.update(cx, |history, cx| history.delete_session(&session_id, cx));
task.detach_and_log_err(cx);
- if let Some(store) = ThreadMetadataStore::try_global(cx) {
+ if let Some(store) = SidebarThreadMetadataStore::try_global(cx) {
store
.update(cx, |store, cx| store.delete(session_id.clone(), cx))
.detach_and_log_err(cx);
@@ -2665,6 +2578,17 @@ impl ConversationView {
cx.notify();
}
}
+
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn set_updated_at(&mut self, updated_at: Instant, cx: &mut Context<Self>) {
+ let Some(connected) = self.as_connected_mut() else {
+ return;
+ };
+
+ connected.conversation.update(cx, |conversation, _cx| {
+ conversation.updated_at = Some(updated_at);
+ });
+ }
}
impl Render for ConversationView {
@@ -3696,6 +3620,7 @@ pub(crate) mod tests {
fn connect(
&self,
_delegate: AgentServerDelegate,
+ _project: Entity<Project>,
_cx: &mut App,
) -> Task<gpui::Result<Rc<dyn AgentConnection>>> {
Task::ready(Ok(Rc::new(self.connection.clone())))
@@ -3720,6 +3645,7 @@ pub(crate) mod tests {
fn connect(
&self,
_delegate: AgentServerDelegate,
+ _project: Entity<Project>,
_cx: &mut App,
) -> Task<gpui::Result<Rc<dyn AgentConnection>>> {
Task::ready(Err(anyhow!(
@@ -4291,7 +4217,7 @@ pub(crate) mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- ThreadMetadataStore::init_global(cx);
+ SidebarThreadMetadataStore::init_global(cx);
theme::init(theme::LoadThemes::JustBase, cx);
editor::init(cx);
agent_panel::init(cx);
@@ -5827,17 +5753,11 @@ pub(crate) mod tests {
cx.run_until_parked();
- // Find the pattern option ID
+ // Find the pattern option ID (the choice with non-empty sub_patterns)
let pattern_option = match &permission_options {
PermissionOptions::Dropdown(choices) => choices
.iter()
- .find(|choice| {
- choice
- .allow
- .option_id
- .0
- .starts_with("always_allow_pattern:")
- })
+ .find(|choice| !choice.sub_patterns.is_empty())
.map(|choice| &choice.allow)
.expect("Should have a pattern option for npm command"),
_ => panic!("Expected dropdown permission options"),
@@ -5868,6 +5788,181 @@ pub(crate) mod tests {
});
}
+ #[gpui::test]
+ async fn test_granularity_selection_updates_state(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let tool_call_id = acp::ToolCallId::new("granularity-test-1");
+ let tool_call =
+ acp::ToolCall::new(tool_call_id.clone(), "Run `cargo build`").kind(acp::ToolKind::Edit);
+
+ let permission_options =
+ ToolPermissionContext::new(TerminalTool::NAME, vec!["cargo build".to_string()])
+ .build_permission_options();
+
+ let connection =
+ StubAgentConnection::new().with_permission_requests(HashMap::from_iter([(
+ tool_call_id.clone(),
+ permission_options.clone(),
+ )]));
+
+ connection.set_next_prompt_updates(vec![acp::SessionUpdate::ToolCall(tool_call)]);
+
+ let (thread_view, cx) = setup_conversation_view(StubAgentServer::new(connection), cx).await;
+ add_to_workspace(thread_view.clone(), cx);
+
+ cx.update(|_window, cx| {
+ AgentSettings::override_global(
+ AgentSettings {
+ notify_when_agent_waiting: NotifyWhenAgentWaiting::Never,
+ ..AgentSettings::get_global(cx).clone()
+ },
+ cx,
+ );
+ });
+
+ let message_editor = message_editor(&thread_view, cx);
+ message_editor.update_in(cx, |editor, window, cx| {
+ editor.set_text("Build the project", window, cx);
+ });
+
+ active_thread(&thread_view, cx).update_in(cx, |view, window, cx| view.send(window, cx));
+
+ cx.run_until_parked();
+
+ // Verify default granularity is the last option (index 2 = "Only this time")
+ thread_view.read_with(cx, |thread_view, cx| {
+ let state = thread_view.active_thread().unwrap();
+ let selected = state.read(cx).permission_selections.get(&tool_call_id);
+ assert!(
+ selected.is_none(),
+ "Should have no selection initially (defaults to last)"
+ );
+ });
+
+ // Select the first option (index 0 = "Always for terminal")
+ thread_view.update_in(cx, |_, window, cx| {
+ window.dispatch_action(
+ crate::SelectPermissionGranularity {
+ tool_call_id: "granularity-test-1".to_string(),
+ index: 0,
+ }
+ .boxed_clone(),
+ cx,
+ );
+ });
+
+ cx.run_until_parked();
+
+ // Verify the selection was updated
+ thread_view.read_with(cx, |thread_view, cx| {
+ let state = thread_view.active_thread().unwrap();
+ let selected = state.read(cx).permission_selections.get(&tool_call_id);
+ assert_eq!(
+ selected.and_then(|s| s.choice_index()),
+ Some(0),
+ "Should have selected index 0"
+ );
+ });
+ }
+
+ #[gpui::test]
+ async fn test_allow_button_uses_selected_granularity(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let tool_call_id = acp::ToolCallId::new("allow-granularity-test-1");
+ let tool_call =
+ acp::ToolCall::new(tool_call_id.clone(), "Run `npm install`").kind(acp::ToolKind::Edit);
+
+ let permission_options =
+ ToolPermissionContext::new(TerminalTool::NAME, vec!["npm install".to_string()])
+ .build_permission_options();
+
+ // Verify we have the expected options
+ let PermissionOptions::Dropdown(choices) = &permission_options else {
+ panic!("Expected dropdown permission options");
+ };
+
+ assert_eq!(choices.len(), 3);
+ assert!(
+ choices[0]
+ .allow
+ .option_id
+ .0
+ .contains("always_allow:terminal")
+ );
+ assert!(
+ choices[1]
+ .allow
+ .option_id
+ .0
+ .contains("always_allow:terminal")
+ );
+ assert!(!choices[1].sub_patterns.is_empty());
+ assert_eq!(choices[2].allow.option_id.0.as_ref(), "allow");
+
+ let connection =
+ StubAgentConnection::new().with_permission_requests(HashMap::from_iter([(
+ tool_call_id.clone(),
+ permission_options.clone(),
+ )]));
+
+ connection.set_next_prompt_updates(vec![acp::SessionUpdate::ToolCall(tool_call)]);
+
+ let (thread_view, cx) = setup_conversation_view(StubAgentServer::new(connection), cx).await;
+ add_to_workspace(thread_view.clone(), cx);
+
+ cx.update(|_window, cx| {
+ AgentSettings::override_global(
+ AgentSettings {
+ notify_when_agent_waiting: NotifyWhenAgentWaiting::Never,
+ ..AgentSettings::get_global(cx).clone()
+ },
+ cx,
+ );
+ });
+
+ let message_editor = message_editor(&thread_view, cx);
+ message_editor.update_in(cx, |editor, window, cx| {
+ editor.set_text("Install dependencies", window, cx);
+ });
+
+ active_thread(&thread_view, cx).update_in(cx, |view, window, cx| view.send(window, cx));
+
+ cx.run_until_parked();
+
+ // Select the pattern option (index 1 = "Always for `npm` commands")
+ thread_view.update_in(cx, |_, window, cx| {
+ window.dispatch_action(
+ crate::SelectPermissionGranularity {
+ tool_call_id: "allow-granularity-test-1".to_string(),
+ index: 1,
+ }
+ .boxed_clone(),
+ cx,
+ );
+ });
+
+ cx.run_until_parked();
+
+ // Simulate clicking the Allow button by dispatching AllowOnce action
+ // which should use the selected granularity
+ active_thread(&thread_view, cx).update_in(cx, |view, window, cx| {
+ view.allow_once(&AllowOnce, window, cx)
+ });
+
+ cx.run_until_parked();
+
+ // Verify tool call was authorized
+ thread_view.read_with(cx, |thread_view, cx| {
+ let tool_call = thread_view.pending_tool_call(cx);
+ assert!(
+ tool_call.is_none(),
+ "Tool call should be authorized after Allow with pattern granularity"
+ );
+ });
+ }
+
#[gpui::test]
async fn test_deny_button_uses_selected_granularity(cx: &mut TestAppContext) {
init_test(cx);
@@ -5947,13 +6042,14 @@ pub(crate) mod tests {
.map(|choice| choice.allow.option_id.0.to_string())
.collect();
- assert!(allow_ids.contains(&"always_allow:terminal".to_string()));
assert!(allow_ids.contains(&"allow".to_string()));
- assert!(
+ assert_eq!(
allow_ids
.iter()
- .any(|id| id.starts_with("always_allow_pattern:terminal\n")),
- "Missing allow pattern option"
+ .filter(|id| *id == "always_allow:terminal")
+ .count(),
+ 2,
+ "Expected two always_allow:terminal IDs (one whole-tool, one pattern with sub_patterns)"
);
}
@@ -5974,13 +6070,14 @@ pub(crate) mod tests {
.map(|choice| choice.deny.option_id.0.to_string())
.collect();
- assert!(deny_ids.contains(&"always_deny:terminal".to_string()));
assert!(deny_ids.contains(&"deny".to_string()));
- assert!(
+ assert_eq!(
deny_ids
.iter()
- .any(|id| id.starts_with("always_deny_pattern:terminal\n")),
- "Missing deny pattern option"
+ .filter(|id| *id == "always_deny:terminal")
+ .count(),
+ 2,
+ "Expected two always_deny:terminal IDs (one whole-tool, one pattern with sub_patterns)"
);
}
@@ -6115,7 +6212,7 @@ pub(crate) mod tests {
tool_call_id: &str,
option_id: &str,
cx: &mut TestAppContext,
- ) -> Task<acp::RequestPermissionOutcome> {
+ ) -> Task<acp_thread::RequestPermissionOutcome> {
let tool_call_id = acp::ToolCallId::new(tool_call_id);
let label = format!("Tool {tool_call_id}");
let option_id = acp::PermissionOptionId::new(option_id);
@@ -6174,7 +6271,7 @@ pub(crate) mod tests {
conversation.authorize_tool_call(
acp::SessionId::new("session-1"),
acp::ToolCallId::new("tc-1"),
- acp::PermissionOptionId::new("allow-1"),
+ acp::PermissionOptionId::new("allow-1").into(),
acp::PermissionOptionKind::AllowOnce,
cx,
);
@@ -6197,7 +6294,7 @@ pub(crate) mod tests {
conversation.authorize_tool_call(
acp::SessionId::new("session-1"),
acp::ToolCallId::new("tc-2"),
- acp::PermissionOptionId::new("allow-2"),
+ acp::PermissionOptionId::new("allow-2").into(),
acp::PermissionOptionKind::AllowOnce,
cx,
);
@@ -6336,7 +6433,7 @@ pub(crate) mod tests {
conversation.authorize_tool_call(
acp::SessionId::new("thread-a"),
acp::ToolCallId::new("tc-a"),
- acp::PermissionOptionId::new("allow-a"),
+ acp::PermissionOptionId::new("allow-a").into(),
acp::PermissionOptionKind::AllowOnce,
cx,
);
@@ -1,9 +1,14 @@
+use crate::SelectPermissionGranularity;
+use std::cell::RefCell;
+
use acp_thread::ContentBlock;
use cloud_api_types::{SubmitAgentThreadFeedbackBody, SubmitAgentThreadFeedbackCommentsBody};
use editor::actions::OpenExcerpts;
use crate::StartThreadIn;
+use crate::message_editor::SharedSessionCapabilities;
use gpui::{Corner, List};
+use heapless::Vec as ArrayVec;
use language_model::{LanguageModelEffortLevel, Speed};
use settings::update_settings_file;
use ui::{ButtonLike, SplitButton, SplitButtonStyle, Tab};
@@ -162,6 +167,56 @@ pub enum AcpThreadViewEvent {
impl EventEmitter<AcpThreadViewEvent> for ThreadView {}
+/// Tracks the user's permission dropdown selection state for a specific tool call.
+///
+/// Default (no entry in the map) means the last dropdown choice is selected,
+/// which is typically "Only this time".
+#[derive(Clone)]
+pub(crate) enum PermissionSelection {
+ /// A specific choice from the dropdown (e.g., "Always for terminal", "Only this time").
+ /// The index corresponds to the position in the `choices` list from `PermissionOptions`.
+ Choice(usize),
+ /// "Select options…" mode where individual command patterns can be toggled.
+ /// Contains the indices of checked patterns in the `patterns` list.
+ /// All patterns start checked when this mode is first activated.
+ SelectedPatterns(Vec<usize>),
+}
+
+impl PermissionSelection {
+ /// Returns the choice index if a specific dropdown choice is selected,
+ /// or `None` if in per-command pattern mode.
+ pub(crate) fn choice_index(&self) -> Option<usize> {
+ match self {
+ Self::Choice(index) => Some(*index),
+ Self::SelectedPatterns(_) => None,
+ }
+ }
+
+ fn is_pattern_checked(&self, index: usize) -> bool {
+ match self {
+ Self::SelectedPatterns(checked) => checked.contains(&index),
+ _ => false,
+ }
+ }
+
+ fn has_any_checked_patterns(&self) -> bool {
+ match self {
+ Self::SelectedPatterns(checked) => !checked.is_empty(),
+ _ => false,
+ }
+ }
+
+ fn toggle_pattern(&mut self, index: usize) {
+ if let Self::SelectedPatterns(checked) = self {
+ if let Some(pos) = checked.iter().position(|&i| i == index) {
+ checked.swap_remove(pos);
+ } else {
+ checked.push(index);
+ }
+ }
+ }
+}
+
pub struct ThreadView {
pub id: acp::SessionId,
pub parent_id: Option<acp::SessionId>,
@@ -187,8 +242,7 @@ pub struct ThreadView {
pub last_token_limit_telemetry: Option<acp_thread::TokenUsageRatio>,
thread_feedback: ThreadFeedbackState,
pub list_state: ListState,
- pub prompt_capabilities: Rc<RefCell<PromptCapabilities>>,
- pub available_commands: Rc<RefCell<Vec<agent_client_protocol::AvailableCommand>>>,
+ pub session_capabilities: SharedSessionCapabilities,
/// Tracks which tool calls have their content/output expanded.
/// Used for showing/hiding tool call results, terminal output, etc.
pub expanded_tool_calls: HashSet<agent_client_protocol::ToolCallId>,
@@ -211,6 +265,9 @@ pub struct ThreadView {
pub is_loading_contents: bool,
pub new_server_version_available: Option<SharedString>,
pub resumed_without_history: bool,
+ pub(crate) permission_selections:
+ HashMap<agent_client_protocol::ToolCallId, PermissionSelection>,
+ pub resume_thread_metadata: Option<AgentSessionInfo>,
pub _cancel_task: Option<Task<()>>,
_save_task: Option<Task<()>>,
_draft_resolve_task: Option<Task<()>>,
@@ -268,8 +325,7 @@ impl ThreadView {
model_selector: Option<Entity<ModelSelectorPopover>>,
profile_selector: Option<Entity<ProfileSelector>>,
list_state: ListState,
- prompt_capabilities: Rc<RefCell<PromptCapabilities>>,
- available_commands: Rc<RefCell<Vec<agent_client_protocol::AvailableCommand>>>,
+ session_capabilities: SharedSessionCapabilities,
resumed_without_history: bool,
project: WeakEntity<Project>,
thread_store: Option<Entity<ThreadStore>>,
@@ -300,8 +356,7 @@ impl ThreadView {
thread_store,
history.as_ref().map(|h| h.downgrade()),
prompt_store,
- prompt_capabilities.clone(),
- available_commands.clone(),
+ session_capabilities.clone(),
agent_id.clone(),
&placeholder,
editor::EditorMode::AutoHeight {
@@ -417,8 +472,7 @@ impl ThreadView {
model_selector,
profile_selector,
list_state,
- prompt_capabilities,
- available_commands,
+ session_capabilities,
resumed_without_history,
_subscriptions: subscriptions,
permission_dropdown_handle: PopoverMenuHandle::default(),
@@ -447,6 +501,8 @@ impl ThreadView {
discarded_partial_edits: HashSet::default(),
is_loading_contents: false,
new_server_version_available: None,
+ permission_selections: HashMap::default(),
+ resume_thread_metadata: None,
_cancel_task: None,
_save_task: None,
_draft_resolve_task: None,
@@ -874,8 +930,9 @@ impl ThreadView {
// Does the agent have a specific logout command? Prefer that in case they need to reset internal state.
let logout_supported = text == "/logout"
&& self
- .available_commands
- .borrow()
+ .session_capabilities
+ .read()
+ .available_commands()
.iter()
.any(|command| command.name == "logout");
if can_login && !logout_supported {
@@ -1009,7 +1066,7 @@ impl ThreadView {
.join(" ");
let text = text.lines().next().unwrap_or("").trim();
if !text.is_empty() {
- let title: SharedString = util::truncate_and_trailoff(text, 20).into();
+ let title: SharedString = util::truncate_and_trailoff(text, 200).into();
thread.update(cx, |thread, cx| {
thread.set_provisional_title(title, cx);
})?;
@@ -1518,13 +1575,13 @@ impl ThreadView {
&mut self,
session_id: acp::SessionId,
tool_call_id: acp::ToolCallId,
- option_id: acp::PermissionOptionId,
+ outcome: SelectedPermissionOutcome,
option_kind: acp::PermissionOptionKind,
window: &mut Window,
cx: &mut Context<Self>,
) {
self.conversation.update(cx, |conversation, cx| {
- conversation.authorize_tool_call(session_id, tool_call_id, option_id, option_kind, cx);
+ conversation.authorize_tool_call(session_id, tool_call_id, outcome, option_kind, cx);
});
if self.should_be_following {
self.workspace
@@ -1587,13 +1644,77 @@ impl ThreadView {
self.authorize_tool_call(
self.id.clone(),
tool_call_id,
- option_id,
+ option_id.into(),
option_kind,
window,
cx,
);
}
+ pub fn handle_select_permission_granularity(
+ &mut self,
+ action: &SelectPermissionGranularity,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let tool_call_id = acp::ToolCallId::new(action.tool_call_id.clone());
+ self.permission_selections
+ .insert(tool_call_id, PermissionSelection::Choice(action.index));
+
+ cx.notify();
+ }
+
+ pub fn handle_toggle_command_pattern(
+ &mut self,
+ action: &crate::ToggleCommandPattern,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let tool_call_id = acp::ToolCallId::new(action.tool_call_id.clone());
+
+ match self.permission_selections.get_mut(&tool_call_id) {
+ Some(PermissionSelection::SelectedPatterns(checked)) => {
+ // Already in pattern mode — toggle the individual pattern.
+ if let Some(pos) = checked.iter().position(|&i| i == action.pattern_index) {
+ checked.swap_remove(pos);
+ } else {
+ checked.push(action.pattern_index);
+ }
+ }
+ _ => {
+ // First click: activate "Select options" with all patterns checked.
+ let thread = self.thread.read(cx);
+ let pattern_count = thread
+ .entries()
+ .iter()
+ .find_map(|entry| {
+ if let AgentThreadEntry::ToolCall(call) = entry {
+ if call.id == tool_call_id {
+ if let ToolCallStatus::WaitingForConfirmation { options, .. } =
+ &call.status
+ {
+ if let PermissionOptions::DropdownWithPatterns {
+ patterns,
+ ..
+ } = options
+ {
+ return Some(patterns.len());
+ }
+ }
+ }
+ }
+ None
+ })
+ .unwrap_or(0);
+ self.permission_selections.insert(
+ tool_call_id,
+ PermissionSelection::SelectedPatterns((0..pattern_count).collect()),
+ );
+ }
+ }
+ cx.notify();
+ }
+
fn authorize_pending_with_granularity(
&mut self,
is_allow: bool,
@@ -1602,20 +1723,77 @@ impl ThreadView {
) -> Option<()> {
let (session_id, tool_call_id, options) =
self.conversation.read(cx).pending_tool_call(&self.id, cx)?;
- let PermissionOptions::Dropdown(choices) = options else {
- let kind = if is_allow {
- acp::PermissionOptionKind::AllowOnce
- } else {
- acp::PermissionOptionKind::RejectOnce
- };
- return self.authorize_pending_tool_call(kind, window, cx);
+ let options = options.clone();
+ self.authorize_with_granularity(session_id, tool_call_id, &options, is_allow, window, cx)
+ }
+
+ fn authorize_with_granularity(
+ &mut self,
+ session_id: acp::SessionId,
+ tool_call_id: acp::ToolCallId,
+ options: &PermissionOptions,
+ is_allow: bool,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Option<()> {
+ let (choices, dropdown_with_patterns) = match options {
+ PermissionOptions::Dropdown(choices) => (choices.as_slice(), None),
+ PermissionOptions::DropdownWithPatterns {
+ choices,
+ patterns,
+ tool_name,
+ } => (
+ choices.as_slice(),
+ Some((patterns.as_slice(), tool_name.as_str())),
+ ),
+ _ => {
+ let kind = if is_allow {
+ acp::PermissionOptionKind::AllowOnce
+ } else {
+ acp::PermissionOptionKind::RejectOnce
+ };
+ return self.authorize_pending_tool_call(kind, window, cx);
+ }
};
- // Get selected index, defaulting to last option ("Only this time")
- let selected_index = self
- .conversation
- .read(cx)
- .selected_permission_granularity(&session_id, &tool_call_id)
+ let selection = self.permission_selections.get(&tool_call_id);
+
+ // When in per-command pattern mode, use the checked patterns.
+ if let Some(PermissionSelection::SelectedPatterns(checked)) = selection
+ && let Some((patterns, tool_name)) = dropdown_with_patterns
+ {
+ let checked_patterns: Vec<_> = patterns
+ .iter()
+ .enumerate()
+ .filter(|(index, _)| checked.contains(index))
+ .map(|(_, cp)| cp.pattern.clone())
+ .collect();
+
+ if !checked_patterns.is_empty() {
+ let (option_id_str, kind) = if is_allow {
+ (
+ format!("always_allow:{}", tool_name),
+ acp::PermissionOptionKind::AllowAlways,
+ )
+ } else {
+ (
+ format!("always_deny:{}", tool_name),
+ acp::PermissionOptionKind::RejectAlways,
+ )
+ };
+ let outcome =
+ SelectedPermissionOutcome::new(acp::PermissionOptionId::new(option_id_str))
+ .params(Some(SelectedPermissionParams::Terminal {
+ patterns: checked_patterns,
+ }));
+ self.authorize_tool_call(session_id, tool_call_id, outcome, kind, window, cx);
+ return Some(());
+ }
+ }
+
+ // Use the selected granularity choice ("Always for terminal" or "Only this time")
+ let selected_index = selection
+ .and_then(|s| s.choice_index())
.unwrap_or_else(|| choices.len().saturating_sub(1));
let selected_choice = choices.get(selected_index).or(choices.last())?;
@@ -1626,10 +1804,21 @@ impl ThreadView {
&selected_choice.deny
};
+ let params = if !selected_choice.sub_patterns.is_empty() {
+ Some(SelectedPermissionParams::Terminal {
+ patterns: selected_choice.sub_patterns.clone(),
+ })
+ } else {
+ None
+ };
+
+ let outcome =
+ SelectedPermissionOutcome::new(selected_option.option_id.clone()).params(params);
+
self.authorize_tool_call(
session_id,
tool_call_id,
- selected_option.option_id.clone(),
+ outcome,
selected_option.kind,
window,
cx,
@@ -3575,8 +3764,9 @@ impl ThreadView {
) -> Entity<ContextMenu> {
let message_editor = self.message_editor.clone();
let workspace = self.workspace.clone();
- let supports_images = self.prompt_capabilities.borrow().image;
- let supports_embedded_context = self.prompt_capabilities.borrow().embedded_context;
+ let session_capabilities = self.session_capabilities.read();
+ let supports_images = session_capabilities.supports_images();
+ let supports_embedded_context = session_capabilities.supports_embedded_context();
let has_editor_selection = workspace
.upgrade()
@@ -5771,10 +5961,23 @@ impl ThreadView {
focus_handle,
cx,
),
- PermissionOptions::Dropdown(options) => self.render_permission_buttons_dropdown(
- session_id,
+ PermissionOptions::Dropdown(choices) => self.render_permission_buttons_with_dropdown(
is_first,
- options,
+ choices,
+ None,
+ entry_ix,
+ tool_call_id,
+ focus_handle,
+ cx,
+ ),
+ PermissionOptions::DropdownWithPatterns {
+ choices,
+ patterns,
+ tool_name,
+ } => self.render_permission_buttons_with_dropdown(
+ is_first,
+ choices,
+ Some((patterns, tool_name)),
entry_ix,
tool_call_id,
focus_handle,
@@ -5783,46 +5986,56 @@ impl ThreadView {
}
}
- fn render_permission_buttons_dropdown(
+ fn render_permission_buttons_with_dropdown(
&self,
- session_id: acp::SessionId,
is_first: bool,
choices: &[PermissionOptionChoice],
+ patterns: Option<(&[PermissionPattern], &str)>,
entry_ix: usize,
tool_call_id: acp::ToolCallId,
focus_handle: &FocusHandle,
cx: &Context<Self>,
) -> Div {
- // Get the selected granularity index, defaulting to the last option ("Only this time")
- let selected_index = self
- .conversation
- .read(cx)
- .selected_permission_granularity(&session_id, &tool_call_id)
- .unwrap_or_else(|| choices.len().saturating_sub(1));
+ let selection = self.permission_selections.get(&tool_call_id);
- let selected_choice = choices.get(selected_index).or(choices.last());
-
- let dropdown_label: SharedString = selected_choice
- .map(|choice| choice.label())
- .unwrap_or_else(|| "Only this time".into());
+ let selected_index = selection
+ .and_then(|s| s.choice_index())
+ .unwrap_or_else(|| choices.len().saturating_sub(1));
- let (allow_option_id, allow_option_kind, deny_option_id, deny_option_kind) =
- if let Some(choice) = selected_choice {
- (
- choice.allow.option_id.clone(),
- choice.allow.kind,
- choice.deny.option_id.clone(),
- choice.deny.kind,
- )
+ let dropdown_label: SharedString =
+ if matches!(selection, Some(PermissionSelection::SelectedPatterns(_))) {
+ "Always for selected commands".into()
} else {
- (
- acp::PermissionOptionId::new("allow"),
- acp::PermissionOptionKind::AllowOnce,
- acp::PermissionOptionId::new("deny"),
- acp::PermissionOptionKind::RejectOnce,
- )
+ choices
+ .get(selected_index)
+ .or(choices.last())
+ .map(|choice| choice.label())
+ .unwrap_or_else(|| "Only this time".into())
};
+ let dropdown = if let Some((pattern_list, tool_name)) = patterns {
+ self.render_permission_granularity_dropdown_with_patterns(
+ choices,
+ pattern_list,
+ tool_name,
+ dropdown_label,
+ entry_ix,
+ tool_call_id.clone(),
+ is_first,
+ cx,
+ )
+ } else {
+ self.render_permission_granularity_dropdown(
+ choices,
+ dropdown_label,
+ entry_ix,
+ tool_call_id.clone(),
+ selected_index,
+ is_first,
+ cx,
+ )
+ };
+
h_flex()
.w_full()
.p_1()
@@ -5852,19 +6065,8 @@ impl ThreadView {
)
})
.on_click(cx.listener({
- let session_id = session_id.clone();
- let tool_call_id = tool_call_id.clone();
- let option_id = allow_option_id;
- let option_kind = allow_option_kind;
move |this, _, window, cx| {
- this.authorize_tool_call(
- session_id.clone(),
- tool_call_id.clone(),
- option_id.clone(),
- option_kind,
- window,
- cx,
- );
+ this.authorize_pending_with_granularity(true, window, cx);
}
})),
)
@@ -5887,33 +6089,13 @@ impl ThreadView {
)
})
.on_click(cx.listener({
- let session_id = session_id.clone();
- let tool_call_id = tool_call_id.clone();
- let option_id = deny_option_id;
- let option_kind = deny_option_kind;
move |this, _, window, cx| {
- this.authorize_tool_call(
- session_id.clone(),
- tool_call_id.clone(),
- option_id.clone(),
- option_kind,
- window,
- cx,
- );
+ this.authorize_pending_with_granularity(false, window, cx);
}
})),
),
)
- .child(self.render_permission_granularity_dropdown(
- choices,
- dropdown_label,
- entry_ix,
- session_id,
- tool_call_id,
- selected_index,
- is_first,
- cx,
- ))
+ .child(dropdown)
}
fn render_permission_granularity_dropdown(
@@ -5921,7 +6103,6 @@ impl ThreadView {
choices: &[PermissionOptionChoice],
current_label: SharedString,
entry_ix: usize,
- session_id: acp::SessionId,
tool_call_id: acp::ToolCallId,
selected_index: usize,
is_first: bool,
@@ -5935,8 +6116,6 @@ impl ThreadView {
let permission_dropdown_handle = self.permission_dropdown_handle.clone();
- let conversation = self.conversation.clone();
-
PopoverMenu::new(("permission-granularity", entry_ix))
.with_handle(permission_dropdown_handle)
.trigger(
@@ -5959,8 +6138,6 @@ impl ThreadView {
}),
)
.menu(move |window, cx| {
- let session_id = session_id.clone();
- let conversation = conversation.clone();
let tool_call_id = tool_call_id.clone();
let options = menu_options.clone();
@@ -5968,23 +6145,22 @@ impl ThreadView {
for (index, display_name) in options.iter() {
let display_name = display_name.clone();
let index = *index;
- let session_id = session_id.clone();
- let conversation = conversation.clone();
- let tool_call_id = tool_call_id.clone();
+ let tool_call_id_for_entry = tool_call_id.clone();
let is_selected = index == selected_index;
menu = menu.toggleable_entry(
display_name,
is_selected,
IconPosition::End,
None,
- move |_window, cx| {
- conversation.update(cx, |conversation, _cx| {
- conversation.set_selected_permission_granularity(
- session_id.clone(),
- tool_call_id.clone(),
+ move |window, cx| {
+ window.dispatch_action(
+ SelectPermissionGranularity {
+ tool_call_id: tool_call_id_for_entry.0.to_string(),
index,
- );
- });
+ }
+ .boxed_clone(),
+ cx,
+ );
},
);
}
@@ -5995,6 +6171,193 @@ impl ThreadView {
.into_any_element()
}
+ fn render_permission_granularity_dropdown_with_patterns(
+ &self,
+ choices: &[PermissionOptionChoice],
+ patterns: &[PermissionPattern],
+ _tool_name: &str,
+ current_label: SharedString,
+ entry_ix: usize,
+ tool_call_id: acp::ToolCallId,
+ is_first: bool,
+ cx: &Context<Self>,
+ ) -> AnyElement {
+ let default_choice_index = choices.len().saturating_sub(1);
+ let menu_options: Vec<(usize, SharedString)> = choices
+ .iter()
+ .enumerate()
+ .map(|(i, choice)| (i, choice.label()))
+ .collect();
+
+ let pattern_options: Vec<(usize, SharedString)> = patterns
+ .iter()
+ .enumerate()
+ .map(|(i, cp)| {
+ (
+ i,
+ SharedString::from(format!("Always for `{}` commands", cp.display_name)),
+ )
+ })
+ .collect();
+
+ let pattern_count = patterns.len();
+ let permission_dropdown_handle = self.permission_dropdown_handle.clone();
+ let view = cx.entity().downgrade();
+
+ PopoverMenu::new(("permission-granularity", entry_ix))
+ .with_handle(permission_dropdown_handle.clone())
+ .anchor(Corner::TopRight)
+ .attach(Corner::BottomRight)
+ .trigger(
+ Button::new(("granularity-trigger", entry_ix), current_label)
+ .end_icon(
+ Icon::new(IconName::ChevronDown)
+ .size(IconSize::XSmall)
+ .color(Color::Muted),
+ )
+ .label_size(LabelSize::Small)
+ .when(is_first, |this| {
+ this.key_binding(
+ KeyBinding::for_action_in(
+ &crate::OpenPermissionDropdown as &dyn Action,
+ &self.focus_handle(cx),
+ cx,
+ )
+ .map(|kb| kb.size(rems_from_px(10.))),
+ )
+ }),
+ )
+ .menu(move |window, cx| {
+ let tool_call_id = tool_call_id.clone();
+ let options = menu_options.clone();
+ let patterns = pattern_options.clone();
+ let view = view.clone();
+ let dropdown_handle = permission_dropdown_handle.clone();
+
+ Some(ContextMenu::build_persistent(
+ window,
+ cx,
+ move |menu, _window, cx| {
+ let mut menu = menu;
+
+ // Read fresh selection state from the view on each rebuild.
+ let selection: Option<PermissionSelection> = view.upgrade().and_then(|v| {
+ let view = v.read(cx);
+ view.permission_selections.get(&tool_call_id).cloned()
+ });
+
+ let is_pattern_mode =
+ matches!(selection, Some(PermissionSelection::SelectedPatterns(_)));
+
+ // Granularity choices: "Always for terminal", "Only this time"
+ for (index, display_name) in options.iter() {
+ let display_name = display_name.clone();
+ let index = *index;
+ let tool_call_id_for_entry = tool_call_id.clone();
+ let is_selected = !is_pattern_mode
+ && selection
+ .as_ref()
+ .and_then(|s| s.choice_index())
+ .map_or(index == default_choice_index, |ci| ci == index);
+
+ let view = view.clone();
+ menu = menu.toggleable_entry(
+ display_name,
+ is_selected,
+ IconPosition::End,
+ None,
+ move |_window, cx| {
+ view.update(cx, |this, cx| {
+ this.permission_selections.insert(
+ tool_call_id_for_entry.clone(),
+ PermissionSelection::Choice(index),
+ );
+ cx.notify();
+ })
+ .log_err();
+ },
+ );
+ }
+
+ menu = menu.separator().header("Select Options…");
+
+ for (pattern_index, label) in patterns.iter() {
+ let label = label.clone();
+ let pattern_index = *pattern_index;
+ let tool_call_id_for_pattern = tool_call_id.clone();
+ let is_checked = selection
+ .as_ref()
+ .is_some_and(|s| s.is_pattern_checked(pattern_index));
+
+ let view = view.clone();
+ menu = menu.toggleable_entry(
+ label,
+ is_checked,
+ IconPosition::End,
+ None,
+ move |_window, cx| {
+ view.update(cx, |this, cx| {
+ let selection = this
+ .permission_selections
+ .get_mut(&tool_call_id_for_pattern);
+
+ match selection {
+ Some(PermissionSelection::SelectedPatterns(_)) => {
+ // Already in pattern mode — toggle.
+ this.permission_selections
+ .get_mut(&tool_call_id_for_pattern)
+ .expect("just matched above")
+ .toggle_pattern(pattern_index);
+ }
+ _ => {
+ // First click: activate pattern mode
+ // with all patterns checked.
+ this.permission_selections.insert(
+ tool_call_id_for_pattern.clone(),
+ PermissionSelection::SelectedPatterns(
+ (0..pattern_count).collect(),
+ ),
+ );
+ }
+ }
+ cx.notify();
+ })
+ .log_err();
+ },
+ );
+ }
+
+ let any_patterns_checked = selection
+ .as_ref()
+ .is_some_and(|s| s.has_any_checked_patterns());
+ let dropdown_handle = dropdown_handle.clone();
+ menu = menu.custom_row(move |_window, _cx| {
+ div()
+ .py_1()
+ .w_full()
+ .child(
+ Button::new("apply-patterns", "Apply")
+ .full_width()
+ .style(ButtonStyle::Outlined)
+ .label_size(LabelSize::Small)
+ .disabled(!any_patterns_checked)
+ .on_click({
+ let dropdown_handle = dropdown_handle.clone();
+ move |_event, _window, cx| {
+ dropdown_handle.hide(cx);
+ }
+ }),
+ )
+ .into_any_element()
+ });
+
+ menu
+ },
+ ))
+ })
+ .into_any_element()
+ }
+
fn render_permission_buttons_flat(
&self,
session_id: acp::SessionId,
@@ -6005,7 +6368,7 @@ impl ThreadView {
focus_handle: &FocusHandle,
cx: &Context<Self>,
) -> Div {
- let mut seen_kinds: ArrayVec<acp::PermissionOptionKind, 3> = ArrayVec::new();
+ let mut seen_kinds: ArrayVec<acp::PermissionOptionKind, 3, u8> = ArrayVec::new();
div()
.p_1()
@@ -6055,7 +6418,7 @@ impl ThreadView {
return this;
}
- seen_kinds.push(option.kind);
+ seen_kinds.push(option.kind).unwrap();
this.key_binding(
KeyBinding::for_action_in(action, focus_handle, cx)
@@ -6072,7 +6435,7 @@ impl ThreadView {
this.authorize_tool_call(
session_id.clone(),
tool_call_id.clone(),
- option_id.clone(),
+ option_id.clone().into(),
option_kind,
window,
cx,
@@ -7676,7 +8039,10 @@ impl ThreadView {
window: &mut Window,
cx: &mut Context<Self>,
) {
- self.permission_dropdown_handle.clone().toggle(window, cx);
+ let menu_handle = self.permission_dropdown_handle.clone();
+ window.defer(cx, move |window, cx| {
+ menu_handle.toggle(window, cx);
+ });
}
fn open_add_context_menu(
@@ -7815,6 +8181,8 @@ impl Render for ThreadView {
.on_action(cx.listener(Self::allow_once))
.on_action(cx.listener(Self::reject_once))
.on_action(cx.listener(Self::handle_authorize_tool_call))
+ .on_action(cx.listener(Self::handle_select_permission_granularity))
+ .on_action(cx.listener(Self::handle_toggle_command_pattern))
.on_action(cx.listener(Self::open_permission_dropdown))
.on_action(cx.listener(Self::open_add_context_menu))
.on_action(cx.listener(|this, _: &ToggleFastMode, _window, cx| {
@@ -1,9 +1,9 @@
-use std::{cell::RefCell, ops::Range, rc::Rc};
+use std::ops::Range;
use super::thread_history::ThreadHistory;
use acp_thread::{AcpThread, AgentThreadEntry};
use agent::ThreadStore;
-use agent_client_protocol::{self as acp, ToolCallId};
+use agent_client_protocol::ToolCallId;
use collections::HashMap;
use editor::{Editor, EditorEvent, EditorMode, MinimapVisibility, SizingBehavior};
use gpui::{
@@ -20,7 +20,7 @@ use theme::ThemeSettings;
use ui::{Context, TextSize};
use workspace::Workspace;
-use crate::message_editor::{MessageEditor, MessageEditorEvent};
+use crate::message_editor::{MessageEditor, MessageEditorEvent, SharedSessionCapabilities};
pub struct EntryViewState {
workspace: WeakEntity<Workspace>,
@@ -29,8 +29,7 @@ pub struct EntryViewState {
history: Option<WeakEntity<ThreadHistory>>,
prompt_store: Option<Entity<PromptStore>>,
entries: Vec<Entry>,
- prompt_capabilities: Rc<RefCell<acp::PromptCapabilities>>,
- available_commands: Rc<RefCell<Vec<acp::AvailableCommand>>>,
+ session_capabilities: SharedSessionCapabilities,
agent_id: AgentId,
}
@@ -41,8 +40,7 @@ impl EntryViewState {
thread_store: Option<Entity<ThreadStore>>,
history: Option<WeakEntity<ThreadHistory>>,
prompt_store: Option<Entity<PromptStore>>,
- prompt_capabilities: Rc<RefCell<acp::PromptCapabilities>>,
- available_commands: Rc<RefCell<Vec<acp::AvailableCommand>>>,
+ session_capabilities: SharedSessionCapabilities,
agent_id: AgentId,
) -> Self {
Self {
@@ -52,8 +50,7 @@ impl EntryViewState {
history,
prompt_store,
entries: Vec::new(),
- prompt_capabilities,
- available_commands,
+ session_capabilities,
agent_id,
}
}
@@ -94,8 +91,7 @@ impl EntryViewState {
self.thread_store.clone(),
self.history.clone(),
self.prompt_store.clone(),
- self.prompt_capabilities.clone(),
- self.available_commands.clone(),
+ self.session_capabilities.clone(),
self.agent_id.clone(),
"Edit message - @ to include context",
editor::EditorMode::AutoHeight {
@@ -458,6 +454,7 @@ fn diff_editor_text_style_refinement(cx: &mut App) -> TextStyleRefinement {
mod tests {
use std::path::Path;
use std::rc::Rc;
+ use std::sync::Arc;
use acp_thread::{AgentConnection, StubAgentConnection};
use agent_client_protocol as acp;
@@ -465,8 +462,10 @@ mod tests {
use editor::RowInfo;
use fs::FakeFs;
use gpui::{AppContext as _, TestAppContext};
+ use parking_lot::RwLock;
use crate::entry_view_state::EntryViewState;
+ use crate::message_editor::SessionCapabilities;
use multi_buffer::MultiBufferRow;
use pretty_assertions::assert_matches;
use project::Project;
@@ -524,8 +523,7 @@ mod tests {
thread_store,
history,
None,
- Default::default(),
- Default::default(),
+ Arc::new(RwLock::new(SessionCapabilities::default())),
"Test Agent".into(),
)
});
@@ -562,7 +562,7 @@ impl MentionSet {
));
let delegate =
AgentServerDelegate::new(project.read(cx).agent_server_store().clone(), None);
- let connection = server.connect(delegate, cx);
+ let connection = server.connect(delegate, project.clone(), cx);
cx.spawn(async move |_, cx| {
let agent = connection.await?;
let agent = agent.downcast::<agent::NativeAgentConnection>().unwrap();
@@ -739,7 +739,7 @@ mod tests {
/// Inserts a list of images into the editor as context mentions.
/// This is the shared implementation used by both paste and file picker operations.
pub(crate) async fn insert_images_as_context(
- images: Vec<gpui::Image>,
+ images: Vec<(gpui::Image, SharedString)>,
editor: Entity<Editor>,
mention_set: Entity<MentionSet>,
workspace: WeakEntity<Workspace>,
@@ -751,7 +751,7 @@ pub(crate) async fn insert_images_as_context(
let replacement_text = MentionUri::PastedImage.as_link().to_string();
- for image in images {
+ for (image, name) in images {
let Some((excerpt_id, text_anchor, multibuffer_anchor)) = editor
.update_in(cx, |editor, window, cx| {
let snapshot = editor.snapshot(window, cx);
@@ -785,7 +785,7 @@ pub(crate) async fn insert_images_as_context(
excerpt_id,
text_anchor,
content_len,
- MentionUri::PastedImage.name().into(),
+ name.clone(),
IconName::Image.path().into(),
None,
None,
@@ -843,12 +843,24 @@ pub(crate) fn paste_images_as_context(
cx: &mut App,
) -> Option<Task<()>> {
let clipboard = cx.read_from_clipboard()?;
+
+ // Only handle paste if the first clipboard entry is an image or file path.
+ // If text comes first, return None so the caller falls through to text paste.
+ // This respects the priority order set by the source application.
+ if matches!(
+ clipboard.entries().first(),
+ Some(ClipboardEntry::String(_)) | None
+ ) {
+ return None;
+ }
+
Some(window.spawn(cx, async move |mut cx| {
use itertools::Itertools;
- let (mut images, paths) = clipboard
+ let default_name: SharedString = MentionUri::PastedImage.name().into();
+ let (mut images, paths): (Vec<(gpui::Image, SharedString)>, Vec<_>) = clipboard
.into_entries()
.filter_map(|entry| match entry {
- ClipboardEntry::Image(image) => Some(Either::Left(image)),
+ ClipboardEntry::Image(image) => Some(Either::Left((image, default_name.clone()))),
ClipboardEntry::ExternalPaths(paths) => Some(Either::Right(paths)),
_ => None,
})
@@ -859,24 +871,32 @@ pub(crate) fn paste_images_as_context(
cx.background_spawn(async move {
let mut images = vec![];
for path in paths.into_iter().flat_map(|paths| paths.paths().to_owned()) {
- let Ok(content) = async_fs::read(path).await else {
+ let Ok(content) = async_fs::read(&path).await else {
continue;
};
let Ok(format) = image::guess_format(&content) else {
continue;
};
- images.push(gpui::Image::from_bytes(
- match format {
- image::ImageFormat::Png => gpui::ImageFormat::Png,
- image::ImageFormat::Jpeg => gpui::ImageFormat::Jpeg,
- image::ImageFormat::WebP => gpui::ImageFormat::Webp,
- image::ImageFormat::Gif => gpui::ImageFormat::Gif,
- image::ImageFormat::Bmp => gpui::ImageFormat::Bmp,
- image::ImageFormat::Tiff => gpui::ImageFormat::Tiff,
- image::ImageFormat::Ico => gpui::ImageFormat::Ico,
- _ => continue,
- },
- content,
+ let name: SharedString = path
+ .file_name()
+ .and_then(|n| n.to_str())
+ .map(|s| SharedString::from(s.to_owned()))
+ .unwrap_or_else(|| default_name.clone());
+ images.push((
+ gpui::Image::from_bytes(
+ match format {
+ image::ImageFormat::Png => gpui::ImageFormat::Png,
+ image::ImageFormat::Jpeg => gpui::ImageFormat::Jpeg,
+ image::ImageFormat::WebP => gpui::ImageFormat::Webp,
+ image::ImageFormat::Gif => gpui::ImageFormat::Gif,
+ image::ImageFormat::Bmp => gpui::ImageFormat::Bmp,
+ image::ImageFormat::Tiff => gpui::ImageFormat::Tiff,
+ image::ImageFormat::Ico => gpui::ImageFormat::Ico,
+ _ => continue,
+ },
+ content,
+ ),
+ name,
));
}
images
@@ -885,12 +905,9 @@ pub(crate) fn paste_images_as_context(
);
}
- cx.update(|_window, cx| {
- cx.stop_propagation();
- })
- .ok();
-
- insert_images_as_context(images, editor, mention_set, workspace, &mut cx).await;
+ if !images.is_empty() {
+ insert_images_as_context(images, editor, mention_set, workspace, &mut cx).await;
+ }
}))
}
@@ -14,7 +14,6 @@ use acp_thread::MentionUri;
use agent::ThreadStore;
use agent_client_protocol as acp;
use anyhow::{Result, anyhow};
-use collections::HashSet;
use editor::{
Addon, AnchorRangeExt, ContextMenuOptions, Editor, EditorElement, EditorEvent, EditorMode,
EditorStyle, Inlay, MultiBuffer, MultiBufferOffset, MultiBufferSnapshot, ToOffset,
@@ -25,13 +24,14 @@ use gpui::{
AppContext, ClipboardEntry, Context, Entity, EventEmitter, FocusHandle, Focusable, ImageFormat,
KeyContext, SharedString, Subscription, Task, TextStyle, WeakEntity,
};
-use language::{Buffer, Language, language_settings::InlayHintKind};
+use language::{Buffer, language_settings::InlayHintKind};
+use parking_lot::RwLock;
use project::AgentId;
use project::{CompletionIntent, InlayHint, InlayHintLabel, InlayId, Project, Worktree};
use prompt_store::PromptStore;
use rope::Point;
use settings::Settings;
-use std::{cell::RefCell, fmt::Write, ops::Range, rc::Rc, sync::Arc};
+use std::{fmt::Write, ops::Range, rc::Rc, sync::Arc};
use theme::ThemeSettings;
use ui::{ContextMenu, Disclosure, ElevationIndex, prelude::*};
use util::paths::PathStyle;
@@ -39,41 +39,39 @@ use util::{ResultExt, debug_panic};
use workspace::{CollaboratorId, Workspace};
use zed_actions::agent::{Chat, PasteRaw};
-pub struct MessageEditor {
- mention_set: Entity<MentionSet>,
- editor: Entity<Editor>,
- workspace: WeakEntity<Workspace>,
- prompt_capabilities: Rc<RefCell<acp::PromptCapabilities>>,
- available_commands: Rc<RefCell<Vec<acp::AvailableCommand>>>,
- agent_id: AgentId,
- thread_store: Option<Entity<ThreadStore>>,
- _subscriptions: Vec<Subscription>,
- _parse_slash_command_task: Task<()>,
+#[derive(Default)]
+pub struct SessionCapabilities {
+ prompt_capabilities: acp::PromptCapabilities,
+ available_commands: Vec<acp::AvailableCommand>,
}
-#[derive(Clone, Debug)]
-pub enum MessageEditorEvent {
- Send,
- SendImmediately,
- Cancel,
- Focus,
- LostFocus,
- InputAttempted(Arc<str>),
-}
+impl SessionCapabilities {
+ pub fn new(
+ prompt_capabilities: acp::PromptCapabilities,
+ available_commands: Vec<acp::AvailableCommand>,
+ ) -> Self {
+ Self {
+ prompt_capabilities,
+ available_commands,
+ }
+ }
-impl EventEmitter<MessageEditorEvent> for MessageEditor {}
+ pub fn supports_images(&self) -> bool {
+ self.prompt_capabilities.image
+ }
-const COMMAND_HINT_INLAY_ID: InlayId = InlayId::Hint(0);
+ pub fn supports_embedded_context(&self) -> bool {
+ self.prompt_capabilities.embedded_context
+ }
-impl PromptCompletionProviderDelegate for Entity<MessageEditor> {
- fn supports_images(&self, cx: &App) -> bool {
- self.read(cx).prompt_capabilities.borrow().image
+ pub fn available_commands(&self) -> &[acp::AvailableCommand] {
+ &self.available_commands
}
- fn supported_modes(&self, cx: &App) -> Vec<PromptContextType> {
+ fn supported_modes(&self, has_thread_store: bool) -> Vec<PromptContextType> {
let mut supported = vec![PromptContextType::File, PromptContextType::Symbol];
- if self.read(cx).prompt_capabilities.borrow().embedded_context {
- if self.read(cx).thread_store.is_some() {
+ if self.prompt_capabilities.embedded_context {
+ if has_thread_store {
supported.push(PromptContextType::Thread);
}
supported.extend(&[
@@ -86,10 +84,8 @@ impl PromptCompletionProviderDelegate for Entity<MessageEditor> {
supported
}
- fn available_commands(&self, cx: &App) -> Vec<crate::completion_provider::AvailableCommand> {
- self.read(cx)
- .available_commands
- .borrow()
+ pub fn completion_commands(&self) -> Vec<crate::completion_provider::AvailableCommand> {
+ self.available_commands
.iter()
.map(|cmd| crate::completion_provider::AvailableCommand {
name: cmd.name.clone().into(),
@@ -99,11 +95,68 @@ impl PromptCompletionProviderDelegate for Entity<MessageEditor> {
.collect()
}
+ pub fn set_prompt_capabilities(&mut self, prompt_capabilities: acp::PromptCapabilities) {
+ self.prompt_capabilities = prompt_capabilities;
+ }
+
+ pub fn set_available_commands(&mut self, available_commands: Vec<acp::AvailableCommand>) {
+ self.available_commands = available_commands;
+ }
+}
+
+pub type SharedSessionCapabilities = Arc<RwLock<SessionCapabilities>>;
+
+struct MessageEditorCompletionDelegate {
+ session_capabilities: SharedSessionCapabilities,
+ has_thread_store: bool,
+ message_editor: WeakEntity<MessageEditor>,
+}
+
+impl PromptCompletionProviderDelegate for MessageEditorCompletionDelegate {
+ fn supports_images(&self, _cx: &App) -> bool {
+ self.session_capabilities.read().supports_images()
+ }
+
+ fn supported_modes(&self, _cx: &App) -> Vec<PromptContextType> {
+ self.session_capabilities
+ .read()
+ .supported_modes(self.has_thread_store)
+ }
+
+ fn available_commands(&self, _cx: &App) -> Vec<crate::completion_provider::AvailableCommand> {
+ self.session_capabilities.read().completion_commands()
+ }
+
fn confirm_command(&self, cx: &mut App) {
- self.update(cx, |this, cx| this.send(cx));
+ let _ = self.message_editor.update(cx, |this, cx| this.send(cx));
}
}
+pub struct MessageEditor {
+ mention_set: Entity<MentionSet>,
+ editor: Entity<Editor>,
+ workspace: WeakEntity<Workspace>,
+ session_capabilities: SharedSessionCapabilities,
+ agent_id: AgentId,
+ thread_store: Option<Entity<ThreadStore>>,
+ _subscriptions: Vec<Subscription>,
+ _parse_slash_command_task: Task<()>,
+}
+
+#[derive(Clone, Debug)]
+pub enum MessageEditorEvent {
+ Send,
+ SendImmediately,
+ Cancel,
+ Focus,
+ LostFocus,
+ InputAttempted(Arc<str>),
+}
+
+impl EventEmitter<MessageEditorEvent> for MessageEditor {}
+
+const COMMAND_HINT_INLAY_ID: InlayId = InlayId::Hint(0);
+
impl MessageEditor {
pub fn new(
workspace: WeakEntity<Workspace>,
@@ -111,24 +164,25 @@ impl MessageEditor {
thread_store: Option<Entity<ThreadStore>>,
history: Option<WeakEntity<ThreadHistory>>,
prompt_store: Option<Entity<PromptStore>>,
- prompt_capabilities: Rc<RefCell<acp::PromptCapabilities>>,
- available_commands: Rc<RefCell<Vec<acp::AvailableCommand>>>,
+ session_capabilities: SharedSessionCapabilities,
agent_id: AgentId,
placeholder: &str,
mode: EditorMode,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
- let language = Language::new(
- language::LanguageConfig {
- completion_query_characters: HashSet::from_iter(['.', '-', '_', '@']),
- ..Default::default()
- },
- None,
- );
+ let language_registry = project
+ .upgrade()
+ .map(|project| project.read(cx).languages().clone());
let editor = cx.new(|cx| {
- let buffer = cx.new(|cx| Buffer::local("", cx).with_language(Arc::new(language), cx));
+ let buffer = cx.new(|cx| {
+ let buffer = Buffer::local("", cx);
+ if let Some(language_registry) = language_registry.as_ref() {
+ buffer.set_language_registry(language_registry.clone());
+ }
+ buffer
+ });
let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
let mut editor = Editor::new(mode, buffer, None, window, cx);
@@ -164,7 +218,11 @@ impl MessageEditor {
let mention_set =
cx.new(|_cx| MentionSet::new(project, thread_store.clone(), prompt_store.clone()));
let completion_provider = Rc::new(PromptCompletionProvider::new(
- cx.entity(),
+ MessageEditorCompletionDelegate {
+ session_capabilities: session_capabilities.clone(),
+ has_thread_store: thread_store.is_some(),
+ message_editor: cx.weak_entity(),
+ },
editor.downgrade(),
mention_set.clone(),
history,
@@ -230,12 +288,27 @@ impl MessageEditor {
}
}));
+ if let Some(language_registry) = language_registry {
+ let editor = editor.clone();
+ cx.spawn(async move |_, cx| {
+ let markdown = language_registry.language_for_name("Markdown").await?;
+ editor.update(cx, |editor, cx| {
+ if let Some(buffer) = editor.buffer().read(cx).as_singleton() {
+ buffer.update(cx, |buffer, cx| {
+ buffer.set_language(Some(markdown), cx);
+ });
+ }
+ });
+ anyhow::Ok(())
+ })
+ .detach_and_log_err(cx);
+ }
+
Self {
editor,
mention_set,
workspace,
- prompt_capabilities,
- available_commands,
+ session_capabilities,
agent_id,
thread_store,
_subscriptions: subscriptions,
@@ -243,18 +316,17 @@ impl MessageEditor {
}
}
- pub fn set_command_state(
+ pub fn set_session_capabilities(
&mut self,
- prompt_capabilities: Rc<RefCell<acp::PromptCapabilities>>,
- available_commands: Rc<RefCell<Vec<acp::AvailableCommand>>>,
+ session_capabilities: SharedSessionCapabilities,
_cx: &mut Context<Self>,
) {
- self.prompt_capabilities = prompt_capabilities;
- self.available_commands = available_commands;
+ self.session_capabilities = session_capabilities;
}
fn command_hint(&self, snapshot: &MultiBufferSnapshot) -> Option<Inlay> {
- let available_commands = self.available_commands.borrow();
+ let session_capabilities = self.session_capabilities.read();
+ let available_commands = session_capabilities.available_commands();
if available_commands.is_empty() {
return None;
}
@@ -334,7 +406,7 @@ impl MessageEditor {
.text_anchor
});
- let supports_images = self.prompt_capabilities.borrow().image;
+ let supports_images = self.session_capabilities.read().supports_images();
self.mention_set
.update(cx, |mention_set, cx| {
@@ -415,7 +487,11 @@ impl MessageEditor {
cx: &mut Context<Self>,
) -> Task<Result<(Vec<acp::ContentBlock>, Vec<Entity<Buffer>>)>> {
let text = self.editor.read(cx).text(cx);
- let available_commands = self.available_commands.borrow().clone();
+ let available_commands = self
+ .session_capabilities
+ .read()
+ .available_commands()
+ .to_vec();
let agent_id = self.agent_id.clone();
let build_task = self.build_content_blocks(full_mention_content, cx);
@@ -442,7 +518,8 @@ impl MessageEditor {
.mention_set
.update(cx, |store, cx| store.contents(full_mention_content, cx));
let editor = self.editor.clone();
- let supports_embedded_context = self.prompt_capabilities.borrow().embedded_context;
+ let supports_embedded_context =
+ self.session_capabilities.read().supports_embedded_context();
cx.spawn(async move |_, cx| {
let contents = contents.await?;
@@ -640,15 +717,14 @@ impl MessageEditor {
let Some(workspace) = self.workspace.upgrade() else {
return;
};
- let editor_clipboard_selections = cx
- .read_from_clipboard()
- .and_then(|item| item.entries().first().cloned())
- .and_then(|entry| match entry {
+ let editor_clipboard_selections = cx.read_from_clipboard().and_then(|item| {
+ item.entries().iter().find_map(|entry| match entry {
ClipboardEntry::String(text) => {
text.metadata_json::<Vec<editor::ClipboardSelection>>()
}
_ => None,
- });
+ })
+ });
// Insert creases for pasted clipboard selections that:
// 1. Contain exactly one selection
@@ -774,14 +850,12 @@ impl MessageEditor {
// Handle text paste with potential markdown mention links.
// This must be checked BEFORE paste_images_as_context because that function
// returns a task even when there are no images in the clipboard.
- if let Some(clipboard_text) = cx
- .read_from_clipboard()
- .and_then(|item| item.entries().first().cloned())
- .and_then(|entry| match entry {
+ if let Some(clipboard_text) = cx.read_from_clipboard().and_then(|item| {
+ item.entries().iter().find_map(|entry| match entry {
ClipboardEntry::String(text) => Some(text.text().to_string()),
_ => None,
})
- {
+ }) {
if clipboard_text.contains("[@") {
cx.stop_propagation();
let selections_before = self.editor.update(cx, |editor, cx| {
@@ -825,7 +899,7 @@ impl MessageEditor {
}
if !all_mentions.is_empty() {
- let supports_images = self.prompt_capabilities.borrow().image;
+ let supports_images = self.session_capabilities.read().supports_images();
let http_client = workspace.read(cx).client().http_client();
for (anchor, content_len, mention_uri) in all_mentions {
@@ -872,7 +946,20 @@ impl MessageEditor {
}
}
- if self.prompt_capabilities.borrow().image
+ let has_non_text_content = cx
+ .read_from_clipboard()
+ .map(|item| {
+ item.entries().iter().any(|entry| {
+ matches!(
+ entry,
+ ClipboardEntry::Image(_) | ClipboardEntry::ExternalPaths(_)
+ )
+ })
+ })
+ .unwrap_or(false);
+
+ if self.session_capabilities.read().supports_images()
+ && has_non_text_content
&& let Some(task) = paste_images_as_context(
self.editor.clone(),
self.mention_set.clone(),
@@ -881,6 +968,7 @@ impl MessageEditor {
cx,
)
{
+ cx.stop_propagation();
task.detach();
return;
}
@@ -947,7 +1035,7 @@ impl MessageEditor {
cx,
);
});
- let supports_images = self.prompt_capabilities.borrow().image;
+ let supports_images = self.session_capabilities.read().supports_images();
tasks.push(self.mention_set.update(cx, |mention_set, cx| {
mention_set.confirm_mention_completion(
file_name,
@@ -1202,7 +1290,7 @@ impl MessageEditor {
return;
};
let Some(completion) =
- PromptCompletionProvider::<Entity<MessageEditor>>::completion_for_action(
+ PromptCompletionProvider::<MessageEditorCompletionDelegate>::completion_for_action(
PromptContextAction::AddSelections,
anchor..anchor,
self.editor.downgrade(),
@@ -1224,7 +1312,7 @@ impl MessageEditor {
}
pub fn add_images_from_picker(&mut self, window: &mut Window, cx: &mut Context<Self>) {
- if !self.prompt_capabilities.borrow().image {
+ if !self.session_capabilities.read().supports_images() {
return;
}
@@ -1278,7 +1366,12 @@ impl MessageEditor {
continue;
};
- images.push(gpui::Image::from_bytes(format, content));
+ let name: gpui::SharedString = path
+ .file_name()
+ .and_then(|n| n.to_str())
+ .map(|s| gpui::SharedString::from(s.to_owned()))
+ .unwrap_or_else(|| "Image".into());
+ images.push((gpui::Image::from_bytes(format, content), name));
}
crate::mention_set::insert_images_as_context(
@@ -1651,7 +1744,7 @@ fn find_matching_bracket(text: &str, open: char, close: char) -> Option<usize> {
#[cfg(test)]
mod tests {
- use std::{cell::RefCell, ops::Range, path::Path, rc::Rc, sync::Arc};
+ use std::{ops::Range, path::Path, sync::Arc};
use acp_thread::MentionUri;
use agent::{ThreadStore, outline};
@@ -1669,6 +1762,7 @@ mod tests {
};
use language_model::LanguageModelRegistry;
use lsp::{CompletionContext, CompletionTriggerKind};
+ use parking_lot::RwLock;
use project::{CompletionIntent, Project, ProjectPath};
use serde_json::json;
@@ -1677,10 +1771,10 @@ mod tests {
use util::{path, paths::PathStyle, rel_path::rel_path};
use workspace::{AppState, Item, MultiWorkspace};
- use crate::completion_provider::{PromptCompletionProviderDelegate, PromptContextType};
+ use crate::completion_provider::PromptContextType;
use crate::{
conversation_view::tests::init_test,
- message_editor::{Mention, MessageEditor, parse_mention_links},
+ message_editor::{Mention, MessageEditor, SessionCapabilities, parse_mention_links},
};
#[test]
@@ -1798,7 +1892,6 @@ mod tests {
None,
None,
Default::default(),
- Default::default(),
"Test Agent".into(),
"Test",
EditorMode::AutoHeight {
@@ -1893,9 +1986,10 @@ mod tests {
let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await;
let thread_store = None;
- let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default()));
- // Start with no available commands - simulating Claude which doesn't support slash commands
- let available_commands = Rc::new(RefCell::new(vec![]));
+ let session_capabilities = Arc::new(RwLock::new(SessionCapabilities::new(
+ acp::PromptCapabilities::default(),
+ vec![],
+ )));
let (multi_workspace, cx) =
cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
@@ -1909,8 +2003,7 @@ mod tests {
thread_store.clone(),
None,
None,
- prompt_capabilities.clone(),
- available_commands.clone(),
+ session_capabilities.clone(),
"Claude Agent".into(),
"Test",
EditorMode::AutoHeight {
@@ -1940,7 +2033,9 @@ mod tests {
assert!(error_message.contains("Available commands: none"));
// Now simulate Claude providing its list of available commands (which doesn't include file)
- available_commands.replace(vec![acp::AvailableCommand::new("help", "Get help")]);
+ session_capabilities
+ .write()
+ .set_available_commands(vec![acp::AvailableCommand::new("help", "Get help")]);
// Test that unsupported slash commands trigger an error when we have a list of available commands
editor.update_in(cx, |editor, window, cx| {
@@ -2054,15 +2149,17 @@ mod tests {
let mut cx = VisualTestContext::from_window(window.into(), cx);
let thread_store = None;
- let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default()));
- let available_commands = Rc::new(RefCell::new(vec![
- acp::AvailableCommand::new("quick-math", "2 + 2 = 4 - 1 = 3"),
- acp::AvailableCommand::new("say-hello", "Say hello to whoever you want").input(
- acp::AvailableCommandInput::Unstructured(acp::UnstructuredCommandInput::new(
- "<name>",
- )),
- ),
- ]));
+ let session_capabilities = Arc::new(RwLock::new(SessionCapabilities::new(
+ acp::PromptCapabilities::default(),
+ vec![
+ acp::AvailableCommand::new("quick-math", "2 + 2 = 4 - 1 = 3"),
+ acp::AvailableCommand::new("say-hello", "Say hello to whoever you want").input(
+ acp::AvailableCommandInput::Unstructured(acp::UnstructuredCommandInput::new(
+ "<name>",
+ )),
+ ),
+ ],
+ )));
let editor = workspace.update_in(&mut cx, |workspace, window, cx| {
let workspace_handle = cx.weak_entity();
@@ -2073,8 +2170,7 @@ mod tests {
thread_store.clone(),
None,
None,
- prompt_capabilities.clone(),
- available_commands.clone(),
+ session_capabilities.clone(),
"Test Agent".into(),
"Test",
EditorMode::AutoHeight {
@@ -2287,7 +2383,10 @@ mod tests {
}
let thread_store = cx.new(|cx| ThreadStore::new(cx));
- let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default()));
+ let session_capabilities = Arc::new(RwLock::new(SessionCapabilities::new(
+ acp::PromptCapabilities::default(),
+ vec![],
+ )));
let (message_editor, editor) = workspace.update_in(&mut cx, |workspace, window, cx| {
let workspace_handle = cx.weak_entity();
@@ -2298,8 +2397,7 @@ mod tests {
Some(thread_store),
None,
None,
- prompt_capabilities.clone(),
- Default::default(),
+ session_capabilities.clone(),
"Test Agent".into(),
"Test",
EditorMode::AutoHeight {
@@ -2345,12 +2443,14 @@ mod tests {
editor.set_text("", window, cx);
});
- prompt_capabilities.replace(
- acp::PromptCapabilities::new()
- .image(true)
- .audio(true)
- .embedded_context(true),
- );
+ message_editor.update(&mut cx, |editor, _cx| {
+ editor.session_capabilities.write().set_prompt_capabilities(
+ acp::PromptCapabilities::new()
+ .image(true)
+ .audio(true)
+ .embedded_context(true),
+ );
+ });
cx.simulate_input("Lorem ");
@@ -2791,7 +2891,6 @@ mod tests {
None,
None,
Default::default(),
- Default::default(),
"Test Agent".into(),
"Test",
EditorMode::AutoHeight {
@@ -2803,8 +2902,9 @@ mod tests {
);
// Enable embedded context so files are actually included
editor
- .prompt_capabilities
- .replace(acp::PromptCapabilities::new().embedded_context(true));
+ .session_capabilities
+ .write()
+ .set_prompt_capabilities(acp::PromptCapabilities::new().embedded_context(true));
editor
})
});
@@ -2893,7 +2993,6 @@ mod tests {
None,
None,
Default::default(),
- Default::default(),
"Test Agent".into(),
"Test",
EditorMode::AutoHeight {
@@ -2964,7 +3063,6 @@ mod tests {
None,
None,
Default::default(),
- Default::default(),
"Test Agent".into(),
"Test",
EditorMode::AutoHeight {
@@ -3019,7 +3117,6 @@ mod tests {
None,
None,
Default::default(),
- Default::default(),
"Test Agent".into(),
"Test",
EditorMode::AutoHeight {
@@ -3034,13 +3131,19 @@ mod tests {
message_editor.update(cx, |editor, _cx| {
editor
- .prompt_capabilities
- .replace(acp::PromptCapabilities::new().embedded_context(true));
+ .session_capabilities
+ .write()
+ .set_prompt_capabilities(acp::PromptCapabilities::new().embedded_context(true));
});
let supported_modes = {
let app = cx.app.borrow();
- message_editor.supported_modes(&app)
+ let _ = &app;
+ message_editor
+ .read(&app)
+ .session_capabilities
+ .read()
+ .supported_modes(false)
};
assert!(
@@ -3072,7 +3175,6 @@ mod tests {
None,
None,
Default::default(),
- Default::default(),
"Test Agent".into(),
"Test",
EditorMode::AutoHeight {
@@ -3087,13 +3189,19 @@ mod tests {
message_editor.update(cx, |editor, _cx| {
editor
- .prompt_capabilities
- .replace(acp::PromptCapabilities::new().embedded_context(true));
+ .session_capabilities
+ .write()
+ .set_prompt_capabilities(acp::PromptCapabilities::new().embedded_context(true));
});
let supported_modes = {
let app = cx.app.borrow();
- message_editor.supported_modes(&app)
+ let _ = &app;
+ message_editor
+ .read(&app)
+ .session_capabilities
+ .read()
+ .supported_modes(true)
};
assert!(
@@ -3126,7 +3234,6 @@ mod tests {
None,
None,
Default::default(),
- Default::default(),
"Test Agent".into(),
"Test",
EditorMode::AutoHeight {
@@ -3190,12 +3297,11 @@ mod tests {
None,
None,
Default::default(),
- Default::default(),
"Test Agent".into(),
"Test",
EditorMode::AutoHeight {
- max_lines: None,
min_lines: 1,
+ max_lines: None,
},
window,
cx,
@@ -3247,8 +3353,9 @@ mod tests {
message_editor.update(cx, |editor, _cx| {
editor
- .prompt_capabilities
- .replace(acp::PromptCapabilities::new().embedded_context(true))
+ .session_capabilities
+ .write()
+ .set_prompt_capabilities(acp::PromptCapabilities::new().embedded_context(true))
});
let content = message_editor
@@ -3351,7 +3458,6 @@ mod tests {
None,
None,
Default::default(),
- Default::default(),
"Test Agent".into(),
"Test",
EditorMode::full(),
@@ -3463,11 +3569,10 @@ mod tests {
MessageEditor::new(
workspace_handle,
project.downgrade(),
- Some(thread_store),
+ Some(thread_store.clone()),
None,
None,
Default::default(),
- Default::default(),
"Test Agent".into(),
"Test",
EditorMode::AutoHeight {
@@ -3548,7 +3653,6 @@ mod tests {
None,
None,
Default::default(),
- Default::default(),
"Test Agent".into(),
"Test",
EditorMode::AutoHeight {
@@ -3608,6 +3712,86 @@ mod tests {
);
}
+ #[gpui::test]
+ async fn test_paste_mention_link_with_completion_trigger_does_not_panic(
+ cx: &mut TestAppContext,
+ ) {
+ init_test(cx);
+
+ let app_state = cx.update(AppState::test);
+
+ cx.update(|cx| {
+ editor::init(cx);
+ workspace::init(app_state.clone(), cx);
+ });
+
+ app_state
+ .fs
+ .as_fake()
+ .insert_tree(path!("/project"), json!({"file.txt": "content"}))
+ .await;
+
+ let project = Project::test(app_state.fs.clone(), [path!("/project").as_ref()], cx).await;
+ let window =
+ cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+ let workspace = window
+ .read_with(cx, |mw, _| mw.workspace().clone())
+ .unwrap();
+
+ let mut cx = VisualTestContext::from_window(window.into(), cx);
+
+ let thread_store = cx.new(|cx| ThreadStore::new(cx));
+
+ let (_message_editor, editor) = workspace.update_in(&mut cx, |workspace, window, cx| {
+ let workspace_handle = cx.weak_entity();
+ let message_editor = cx.new(|cx| {
+ MessageEditor::new(
+ workspace_handle,
+ project.downgrade(),
+ Some(thread_store),
+ None,
+ None,
+ Default::default(),
+ "Test Agent".into(),
+ "Test",
+ EditorMode::AutoHeight {
+ max_lines: None,
+ min_lines: 1,
+ },
+ window,
+ cx,
+ )
+ });
+ workspace.active_pane().update(cx, |pane, cx| {
+ pane.add_item(
+ Box::new(cx.new(|_| MessageEditorItem(message_editor.clone()))),
+ true,
+ true,
+ None,
+ window,
+ cx,
+ );
+ });
+ message_editor.read(cx).focus_handle(cx).focus(window, cx);
+ let editor = message_editor.read(cx).editor().clone();
+ (message_editor, editor)
+ });
+
+ cx.simulate_input("@");
+
+ editor.update(&mut cx, |editor, cx| {
+ assert_eq!(editor.text(cx), "@");
+ assert!(editor.has_visible_completions_menu());
+ });
+
+ cx.write_to_clipboard(ClipboardItem::new_string("[@f](file:///test.txt) @".into()));
+ cx.dispatch_action(Paste);
+
+ editor.update(&mut cx, |editor, cx| {
+ assert!(editor.text(cx).contains("[@f](file:///test.txt)"));
+ });
+ }
+
// Helper that creates a minimal MessageEditor inside a window, returning both
// the entity and the underlying VisualTestContext so callers can drive updates.
async fn setup_message_editor(
@@ -3630,7 +3814,6 @@ mod tests {
None,
None,
Default::default(),
- Default::default(),
"Test Agent".into(),
"Test",
EditorMode::AutoHeight {
@@ -3781,7 +3964,6 @@ mod tests {
None,
None,
Default::default(),
- Default::default(),
"Test Agent".into(),
"Test",
EditorMode::AutoHeight {
@@ -3,6 +3,7 @@ use agent_client_protocol as acp;
use agent_servers::{AgentServer, AgentServerDelegate};
use gpui::{Entity, Task, TestAppContext, VisualTestContext};
use project::AgentId;
+use project::Project;
use settings::SettingsStore;
use std::any::Any;
use std::rc::Rc;
@@ -12,11 +13,23 @@ use crate::agent_panel;
pub struct StubAgentServer<C> {
connection: C,
+ agent_id: AgentId,
}
-impl<C> StubAgentServer<C> {
+impl<C> StubAgentServer<C>
+where
+ C: AgentConnection,
+{
pub fn new(connection: C) -> Self {
- Self { connection }
+ Self {
+ connection,
+ agent_id: "Test".into(),
+ }
+ }
+
+ pub fn with_connection_agent_id(mut self) -> Self {
+ self.agent_id = self.connection.agent_id();
+ self
}
}
@@ -39,12 +52,13 @@ where
}
fn agent_id(&self) -> AgentId {
- "Test".into()
+ self.agent_id.clone()
}
fn connect(
&self,
_delegate: AgentServerDelegate,
+ _project: Entity<Project>,
_cx: &mut gpui::App,
) -> Task<gpui::Result<Rc<dyn AgentConnection>>> {
Task::ready(Ok(Rc::new(self.connection.clone())))
@@ -81,6 +95,23 @@ pub fn open_thread_with_connection(
cx.run_until_parked();
}
+pub fn open_thread_with_custom_connection<C>(
+ panel: &Entity<AgentPanel>,
+ connection: C,
+ cx: &mut VisualTestContext,
+) where
+ C: 'static + AgentConnection + Send + Clone,
+{
+ panel.update_in(cx, |panel, window, cx| {
+ panel.open_external_thread_with_server(
+ Rc::new(StubAgentServer::new(connection).with_connection_agent_id()),
+ window,
+ cx,
+ );
+ });
+ cx.run_until_parked();
+}
+
pub fn send_message(panel: &Entity<AgentPanel>, cx: &mut VisualTestContext) {
let thread_view = panel.read_with(cx, |panel, cx| panel.active_thread_view(cx).unwrap());
let message_editor = thread_view.read_with(cx, |view, _cx| view.message_editor.clone());
@@ -1761,15 +1761,14 @@ impl TextThreadEditor {
let Some(workspace) = self.workspace.upgrade() else {
return;
};
- let editor_clipboard_selections = cx
- .read_from_clipboard()
- .and_then(|item| item.entries().first().cloned())
- .and_then(|entry| match entry {
+ let editor_clipboard_selections = cx.read_from_clipboard().and_then(|item| {
+ item.entries().iter().find_map(|entry| match entry {
ClipboardEntry::String(text) => {
text.metadata_json::<Vec<editor::ClipboardSelection>>()
}
_ => None,
- });
+ })
+ });
// Insert creases for pasted clipboard selections that:
// 1. Contain exactly one selection
@@ -1801,7 +1800,14 @@ impl TextThreadEditor {
.unwrap_or(false);
if should_insert_creases && let Some(clipboard_item) = cx.read_from_clipboard() {
- if let Some(ClipboardEntry::String(clipboard_text)) = clipboard_item.entries().first() {
+ let clipboard_text = clipboard_item
+ .entries()
+ .iter()
+ .find_map(|entry| match entry {
+ ClipboardEntry::String(s) => Some(s),
+ _ => None,
+ });
+ if let Some(clipboard_text) = clipboard_text {
if let Some(selections) = editor_clipboard_selections {
cx.stop_propagation();
@@ -1872,65 +1878,60 @@ impl TextThreadEditor {
cx.stop_propagation();
- let mut images = if let Some(item) = cx.read_from_clipboard() {
- item.into_entries()
- .filter_map(|entry| {
- if let ClipboardEntry::Image(image) = entry {
- Some(image)
- } else {
- None
- }
- })
- .collect()
- } else {
- Vec::new()
- };
+ let clipboard_item = cx.read_from_clipboard();
- if let Some(paths) = cx.read_from_clipboard() {
- for path in paths
- .into_entries()
- .filter_map(|entry| {
- if let ClipboardEntry::ExternalPaths(paths) = entry {
- Some(paths.paths().to_owned())
- } else {
- None
+ let mut images: Vec<gpui::Image> = Vec::new();
+ let mut paths: Vec<std::path::PathBuf> = Vec::new();
+ let mut metadata: Option<CopyMetadata> = None;
+
+ if let Some(item) = &clipboard_item {
+ for entry in item.entries() {
+ match entry {
+ ClipboardEntry::Image(image) => images.push(image.clone()),
+ ClipboardEntry::ExternalPaths(external) => {
+ paths.extend(external.paths().iter().cloned());
}
- })
- .flatten()
- {
- let Ok(content) = std::fs::read(path) else {
- continue;
- };
- let Ok(format) = image::guess_format(&content) else {
- continue;
- };
- images.push(gpui::Image::from_bytes(
- match format {
- image::ImageFormat::Png => gpui::ImageFormat::Png,
- image::ImageFormat::Jpeg => gpui::ImageFormat::Jpeg,
- image::ImageFormat::WebP => gpui::ImageFormat::Webp,
- image::ImageFormat::Gif => gpui::ImageFormat::Gif,
- image::ImageFormat::Bmp => gpui::ImageFormat::Bmp,
- image::ImageFormat::Tiff => gpui::ImageFormat::Tiff,
- image::ImageFormat::Ico => gpui::ImageFormat::Ico,
- _ => continue,
- },
- content,
- ));
+ ClipboardEntry::String(text) => {
+ if metadata.is_none() {
+ metadata = text.metadata_json::<CopyMetadata>();
+ }
+ }
+ }
}
}
- let metadata = if let Some(item) = cx.read_from_clipboard() {
- item.entries().first().and_then(|entry| {
- if let ClipboardEntry::String(text) = entry {
- text.metadata_json::<CopyMetadata>()
- } else {
- None
- }
- })
- } else {
- None
- };
+ for path in paths {
+ let Ok(content) = std::fs::read(path) else {
+ continue;
+ };
+ let Ok(format) = image::guess_format(&content) else {
+ continue;
+ };
+ images.push(gpui::Image::from_bytes(
+ match format {
+ image::ImageFormat::Png => gpui::ImageFormat::Png,
+ image::ImageFormat::Jpeg => gpui::ImageFormat::Jpeg,
+ image::ImageFormat::WebP => gpui::ImageFormat::Webp,
+ image::ImageFormat::Gif => gpui::ImageFormat::Gif,
+ image::ImageFormat::Bmp => gpui::ImageFormat::Bmp,
+ image::ImageFormat::Tiff => gpui::ImageFormat::Tiff,
+ image::ImageFormat::Ico => gpui::ImageFormat::Ico,
+ _ => continue,
+ },
+ content,
+ ));
+ }
+
+ // Respect entry priority order — if the first entry is text, the source
+ // application considers text the primary content. Discard collected images
+ // so the text-paste branch runs instead.
+ if clipboard_item
+ .as_ref()
+ .and_then(|item| item.entries().first())
+ .is_some_and(|entry| matches!(entry, ClipboardEntry::String(_)))
+ {
+ images.clear();
+ }
if images.is_empty() {
self.editor.update(cx, |editor, cx| {
@@ -1,8 +1,9 @@
use std::{path::Path, sync::Arc};
+use acp_thread::AgentSessionInfo;
use agent::{ThreadStore, ZED_AGENT_ID};
use agent_client_protocol as acp;
-use anyhow::Result;
+use anyhow::{Context as _, Result};
use chrono::{DateTime, Utc};
use collections::HashMap;
use db::{
@@ -13,13 +14,15 @@ use db::{
sqlez_macros::sql,
};
use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt};
+use futures::{FutureExt as _, future::Shared};
use gpui::{AppContext as _, Entity, Global, Subscription, Task};
use project::AgentId;
use ui::{App, Context, SharedString};
+use util::ResultExt as _;
use workspace::PathList;
pub fn init(cx: &mut App) {
- ThreadMetadataStore::init_global(cx);
+ SidebarThreadMetadataStore::init_global(cx);
if cx.has_flag::<AgentV2FeatureFlag>() {
migrate_thread_metadata(cx);
@@ -33,41 +36,64 @@ pub fn init(cx: &mut App) {
}
/// Migrate existing thread metadata from native agent thread store to the new metadata storage.
+/// We migrate the last 10 threads per project and skip threads that do not have a project.
///
/// TODO: Remove this after N weeks of shipping the sidebar
fn migrate_thread_metadata(cx: &mut App) {
- ThreadMetadataStore::global(cx).update(cx, |store, cx| {
- let list = store.list(cx);
- cx.spawn(async move |this, cx| {
- let Ok(list) = list.await else {
- return;
- };
- if list.is_empty() {
- this.update(cx, |this, cx| {
- let metadata = ThreadStore::global(cx)
- .read(cx)
- .entries()
- .map(|entry| ThreadMetadata {
- session_id: entry.id,
- agent_id: None,
- title: entry.title,
- updated_at: entry.updated_at,
- created_at: entry.created_at,
- folder_paths: entry.folder_paths,
- })
- .collect::<Vec<_>>();
- for entry in metadata {
- this.save(entry, cx).detach_and_log_err(cx);
+ const MAX_MIGRATED_THREADS_PER_PROJECT: usize = 10;
+
+ let store = SidebarThreadMetadataStore::global(cx);
+ let db = store.read(cx).db.clone();
+
+ cx.spawn(async move |cx| {
+ if !db.is_empty()? {
+ return Ok::<(), anyhow::Error>(());
+ }
+
+ let metadata = store.read_with(cx, |_store, app| {
+ let mut migrated_threads_per_project = HashMap::default();
+
+ ThreadStore::global(app)
+ .read(app)
+ .entries()
+ .filter_map(|entry| {
+ if entry.folder_paths.is_empty() {
+ return None;
+ }
+
+ let migrated_thread_count = migrated_threads_per_project
+ .entry(entry.folder_paths.clone())
+ .or_insert(0);
+ if *migrated_thread_count >= MAX_MIGRATED_THREADS_PER_PROJECT {
+ return None;
}
+ *migrated_thread_count += 1;
+
+ Some(ThreadMetadata {
+ session_id: entry.id,
+ agent_id: None,
+ title: entry.title,
+ updated_at: entry.updated_at,
+ created_at: entry.created_at,
+ folder_paths: entry.folder_paths,
+ })
})
- .ok();
- }
- })
- .detach();
- });
+ .collect::<Vec<_>>()
+ });
+
+ // Manually save each entry to the database and call reload, otherwise
+ // we'll end up triggering lots of reloads after each save
+ for entry in metadata {
+ db.save(entry).await?;
+ }
+
+ let _ = store.update(cx, |store, cx| store.reload(cx));
+ Ok(())
+ })
+ .detach_and_log_err(cx);
}
-struct GlobalThreadMetadataStore(Entity<ThreadMetadataStore>);
+struct GlobalThreadMetadataStore(Entity<SidebarThreadMetadataStore>);
impl Global for GlobalThreadMetadataStore {}
/// Lightweight metadata for any thread (native or ACP), enough to populate
@@ -83,19 +109,82 @@ pub struct ThreadMetadata {
pub folder_paths: PathList,
}
-pub struct ThreadMetadataStore {
+impl ThreadMetadata {
+ pub fn from_session_info(agent_id: AgentId, session: &AgentSessionInfo) -> Self {
+ let session_id = session.session_id.clone();
+ let title = session.title.clone().unwrap_or_default();
+ let updated_at = session.updated_at.unwrap_or_else(|| Utc::now());
+ let created_at = session.created_at.unwrap_or(updated_at);
+ let folder_paths = session.work_dirs.clone().unwrap_or_default();
+ let agent_id = if agent_id.as_ref() == ZED_AGENT_ID.as_ref() {
+ None
+ } else {
+ Some(agent_id)
+ };
+ Self {
+ session_id,
+ agent_id,
+ title,
+ updated_at,
+ created_at: Some(created_at),
+ folder_paths,
+ }
+ }
+
+ pub fn from_thread(thread: &Entity<acp_thread::AcpThread>, cx: &App) -> Self {
+ let thread_ref = thread.read(cx);
+ let session_id = thread_ref.session_id().clone();
+ let title = thread_ref.title();
+ let updated_at = Utc::now();
+
+ let agent_id = thread_ref.connection().agent_id();
+
+ let agent_id = if agent_id.as_ref() == ZED_AGENT_ID.as_ref() {
+ None
+ } else {
+ Some(agent_id)
+ };
+
+ let folder_paths = {
+ let project = thread_ref.project().read(cx);
+ let paths: Vec<Arc<Path>> = project
+ .visible_worktrees(cx)
+ .map(|worktree| worktree.read(cx).abs_path())
+ .collect();
+ PathList::new(&paths)
+ };
+
+ Self {
+ session_id,
+ agent_id,
+ title,
+ created_at: Some(updated_at), // handled by db `ON CONFLICT`
+ updated_at,
+ folder_paths,
+ }
+ }
+}
+
+/// The store holds all metadata needed to show threads in the sidebar.
+/// Effectively, all threads stored in here are "non-archived".
+///
+/// Automatically listens to AcpThread events and updates metadata if it has changed.
+pub struct SidebarThreadMetadataStore {
db: ThreadMetadataDb,
+ threads: Vec<ThreadMetadata>,
+ threads_by_paths: HashMap<PathList, Vec<ThreadMetadata>>,
+ reload_task: Option<Shared<Task<()>>>,
session_subscriptions: HashMap<acp::SessionId, Subscription>,
}
-impl ThreadMetadataStore {
+impl SidebarThreadMetadataStore {
#[cfg(not(any(test, feature = "test-support")))]
pub fn init_global(cx: &mut App) {
if cx.has_global::<Self>() {
return;
}
- let db = THREAD_METADATA_DB.clone();
+ let db = ThreadMetadataDb::global(cx);
let thread_store = cx.new(|cx| Self::new(db, cx));
cx.set_global(GlobalThreadMetadataStore(thread_store));
}
@@ -119,12 +208,61 @@ impl ThreadMetadataStore {
cx.global::<GlobalThreadMetadataStore>().0.clone()
}
- pub fn list(&self, cx: &App) -> Task<Result<Vec<ThreadMetadata>>> {
+ pub fn is_empty(&self) -> bool {
+ self.threads.is_empty()
+ }
+
+ pub fn entries(&self) -> impl Iterator<Item = ThreadMetadata> + '_ {
+ self.threads.iter().cloned()
+ }
+
+ pub fn entry_ids(&self) -> impl Iterator<Item = acp::SessionId> + '_ {
+ self.threads.iter().map(|thread| thread.session_id.clone())
+ }
+
+ pub fn entries_for_path(
+ &self,
+ path_list: &PathList,
+ ) -> impl Iterator<Item = ThreadMetadata> + '_ {
+ self.threads_by_paths
+ .get(path_list)
+ .into_iter()
+ .flatten()
+ .cloned()
+ }
+
+ fn reload(&mut self, cx: &mut Context<Self>) -> Shared<Task<()>> {
let db = self.db.clone();
- cx.background_spawn(async move {
- let s = db.list()?;
- Ok(s)
- })
+ self.reload_task.take();
+
+ let list_task = cx
+ .background_spawn(async move { db.list().context("Failed to fetch sidebar metadata") });
+
+ let reload_task = cx
+ .spawn(async move |this, cx| {
+ let Some(rows) = list_task.await.log_err() else {
+ return;
+ };
+
+ this.update(cx, |this, cx| {
+ this.threads.clear();
+ this.threads_by_paths.clear();
+
+ for row in rows {
+ this.threads_by_paths
+ .entry(row.folder_paths.clone())
+ .or_default()
+ .push(row.clone());
+ this.threads.push(row);
+ }
+
+ cx.notify();
+ })
+ .ok();
+ })
+ .shared();
+ self.reload_task = Some(reload_task.clone());
+ reload_task
}
pub fn save(&mut self, metadata: ThreadMetadata, cx: &mut Context<Self>) -> Task<Result<()>> {
@@ -135,7 +273,9 @@ impl ThreadMetadataStore {
let db = self.db.clone();
cx.spawn(async move |this, cx| {
db.save(metadata).await?;
- this.update(cx, |_this, cx| cx.notify())
+ let reload_task = this.update(cx, |this, cx| this.reload(cx))?;
+ reload_task.await;
+ Ok(())
})
}
@@ -151,7 +291,9 @@ impl ThreadMetadataStore {
let db = self.db.clone();
cx.spawn(async move |this, cx| {
db.delete(session_id).await?;
- this.update(cx, |_this, cx| cx.notify())
+ let reload_task = this.update(cx, |this, cx| this.reload(cx))?;
+ reload_task.await;
+ Ok(())
})
}
@@ -188,10 +330,15 @@ impl ThreadMetadataStore {
})
.detach();
- Self {
+ let mut this = Self {
db,
+ threads: Vec::new(),
+ threads_by_paths: HashMap::default(),
+ reload_task: None,
session_subscriptions: HashMap::default(),
- }
+ };
+ let _ = this.reload(cx);
+ this
}
fn handle_thread_update(
@@ -207,51 +354,26 @@ impl ThreadMetadataStore {
match event {
acp_thread::AcpThreadEvent::NewEntry
+ | acp_thread::AcpThreadEvent::TitleUpdated
| acp_thread::AcpThreadEvent::EntryUpdated(_)
- | acp_thread::AcpThreadEvent::TitleUpdated => {
- let metadata = Self::metadata_for_acp_thread(thread.read(cx), cx);
+ | acp_thread::AcpThreadEvent::EntriesRemoved(_)
+ | acp_thread::AcpThreadEvent::ToolAuthorizationRequested(_)
+ | acp_thread::AcpThreadEvent::ToolAuthorizationReceived(_)
+ | acp_thread::AcpThreadEvent::Retry(_)
+ | acp_thread::AcpThreadEvent::Stopped(_)
+ | acp_thread::AcpThreadEvent::Error
+ | acp_thread::AcpThreadEvent::LoadError(_)
+ | acp_thread::AcpThreadEvent::Refusal => {
+ let metadata = ThreadMetadata::from_thread(&thread, cx);
self.save(metadata, cx).detach_and_log_err(cx);
}
_ => {}
}
}
-
- fn metadata_for_acp_thread(thread: &acp_thread::AcpThread, cx: &App) -> ThreadMetadata {
- let session_id = thread.session_id().clone();
- let title = thread.title();
- let updated_at = Utc::now();
-
- let agent_id = thread.connection().agent_id();
-
- let agent_id = if agent_id.as_ref() == ZED_AGENT_ID.as_ref() {
- None
- } else {
- Some(agent_id)
- };
-
- let folder_paths = {
- let project = thread.project().read(cx);
- let paths: Vec<Arc<Path>> = project
- .visible_worktrees(cx)
- .map(|worktree| worktree.read(cx).abs_path())
- .collect();
- PathList::new(&paths)
- };
-
- ThreadMetadata {
- session_id,
- agent_id,
- title,
- created_at: Some(updated_at), // handled by db `ON CONFLICT`
- updated_at,
- folder_paths,
- }
- }
}
-impl Global for ThreadMetadataStore {}
+impl Global for SidebarThreadMetadataStore {}
-#[derive(Clone)]
struct ThreadMetadataDb(ThreadSafeConnection);
impl Domain for ThreadMetadataDb {
@@ -270,9 +392,14 @@ impl Domain for ThreadMetadataDb {
)];
}
-db::static_connection!(THREAD_METADATA_DB, ThreadMetadataDb, []);
+db::static_connection!(ThreadMetadataDb, []);
impl ThreadMetadataDb {
+ pub fn is_empty(&self) -> anyhow::Result<bool> {
+ self.select::<i64>("SELECT COUNT(*) FROM sidebar_threads")?()
+ .map(|counts| counts.into_iter().next().unwrap_or_default() == 0)
+ }
+
/// List all sidebar thread metadata, ordered by updated_at descending.
pub fn list(&self) -> anyhow::Result<Vec<ThreadMetadata>> {
self.select::<ThreadMetadata>(
@@ -385,7 +512,6 @@ mod tests {
use project::Project;
use std::path::Path;
use std::rc::Rc;
- use util::path_list::PathList;
fn make_db_thread(title: &str, updated_at: DateTime<Utc>) -> DbThread {
DbThread {
@@ -408,51 +534,283 @@ mod tests {
}
}
+ fn make_metadata(
+ session_id: &str,
+ title: &str,
+ updated_at: DateTime<Utc>,
+ folder_paths: PathList,
+ ) -> ThreadMetadata {
+ ThreadMetadata {
+ session_id: acp::SessionId::new(session_id),
+ agent_id: None,
+ title: title.to_string().into(),
+ updated_at,
+ created_at: Some(updated_at),
+ folder_paths,
+ }
+ }
+
+ #[gpui::test]
+ async fn test_store_initializes_cache_from_database(cx: &mut TestAppContext) {
+ let first_paths = PathList::new(&[Path::new("/project-a")]);
+ let second_paths = PathList::new(&[Path::new("/project-b")]);
+ let now = Utc::now();
+ let older = now - chrono::Duration::seconds(1);
+
+ let thread = std::thread::current();
+ let test_name = thread.name().unwrap_or("unknown_test");
+ let db_name = format!("THREAD_METADATA_DB_{}", test_name);
+ let db = ThreadMetadataDb(smol::block_on(db::open_test_db::<ThreadMetadataDb>(
+ &db_name,
+ )));
+
+ db.save(make_metadata(
+ "session-1",
+ "First Thread",
+ now,
+ first_paths.clone(),
+ ))
+ .await
+ .unwrap();
+ db.save(make_metadata(
+ "session-2",
+ "Second Thread",
+ older,
+ second_paths.clone(),
+ ))
+ .await
+ .unwrap();
+
+ cx.update(|cx| {
+ let settings_store = settings::SettingsStore::test(cx);
+ cx.set_global(settings_store);
+ cx.update_flags(true, vec!["agent-v2".to_string()]);
+ SidebarThreadMetadataStore::init_global(cx);
+ });
+
+ cx.run_until_parked();
+
+ cx.update(|cx| {
+ let store = SidebarThreadMetadataStore::global(cx);
+ let store = store.read(cx);
+
+ let entry_ids = store
+ .entry_ids()
+ .map(|session_id| session_id.0.to_string())
+ .collect::<Vec<_>>();
+ assert_eq!(entry_ids, vec!["session-1", "session-2"]);
+
+ let first_path_entries = store
+ .entries_for_path(&first_paths)
+ .map(|entry| entry.session_id.0.to_string())
+ .collect::<Vec<_>>();
+ assert_eq!(first_path_entries, vec!["session-1"]);
+
+ let second_path_entries = store
+ .entries_for_path(&second_paths)
+ .map(|entry| entry.session_id.0.to_string())
+ .collect::<Vec<_>>();
+ assert_eq!(second_path_entries, vec!["session-2"]);
+ });
+ }
+
+ #[gpui::test]
+ async fn test_store_cache_updates_after_save_and_delete(cx: &mut TestAppContext) {
+ cx.update(|cx| {
+ let settings_store = settings::SettingsStore::test(cx);
+ cx.set_global(settings_store);
+ cx.update_flags(true, vec!["agent-v2".to_string()]);
+ SidebarThreadMetadataStore::init_global(cx);
+ });
+
+ let first_paths = PathList::new(&[Path::new("/project-a")]);
+ let second_paths = PathList::new(&[Path::new("/project-b")]);
+ let initial_time = Utc::now();
+ let updated_time = initial_time + chrono::Duration::seconds(1);
+
+ let initial_metadata = make_metadata(
+ "session-1",
+ "First Thread",
+ initial_time,
+ first_paths.clone(),
+ );
+
+ let second_metadata = make_metadata(
+ "session-2",
+ "Second Thread",
+ initial_time,
+ second_paths.clone(),
+ );
+
+ cx.update(|cx| {
+ let store = SidebarThreadMetadataStore::global(cx);
+ store.update(cx, |store, cx| {
+ store.save(initial_metadata, cx).detach();
+ store.save(second_metadata, cx).detach();
+ });
+ });
+
+ cx.run_until_parked();
+
+ cx.update(|cx| {
+ let store = SidebarThreadMetadataStore::global(cx);
+ let store = store.read(cx);
+
+ let first_path_entries = store
+ .entries_for_path(&first_paths)
+ .map(|entry| entry.session_id.0.to_string())
+ .collect::<Vec<_>>();
+ assert_eq!(first_path_entries, vec!["session-1"]);
+
+ let second_path_entries = store
+ .entries_for_path(&second_paths)
+ .map(|entry| entry.session_id.0.to_string())
+ .collect::<Vec<_>>();
+ assert_eq!(second_path_entries, vec!["session-2"]);
+ });
+
+ let moved_metadata = make_metadata(
+ "session-1",
+ "First Thread",
+ updated_time,
+ second_paths.clone(),
+ );
+
+ cx.update(|cx| {
+ let store = SidebarThreadMetadataStore::global(cx);
+ store.update(cx, |store, cx| {
+ store.save(moved_metadata, cx).detach();
+ });
+ });
+
+ cx.run_until_parked();
+
+ cx.update(|cx| {
+ let store = SidebarThreadMetadataStore::global(cx);
+ let store = store.read(cx);
+
+ let entry_ids = store
+ .entry_ids()
+ .map(|session_id| session_id.0.to_string())
+ .collect::<Vec<_>>();
+ assert_eq!(entry_ids, vec!["session-1", "session-2"]);
+
+ let first_path_entries = store
+ .entries_for_path(&first_paths)
+ .map(|entry| entry.session_id.0.to_string())
+ .collect::<Vec<_>>();
+ assert!(first_path_entries.is_empty());
+
+ let second_path_entries = store
+ .entries_for_path(&second_paths)
+ .map(|entry| entry.session_id.0.to_string())
+ .collect::<Vec<_>>();
+ assert_eq!(second_path_entries, vec!["session-1", "session-2"]);
+ });
+
+ cx.update(|cx| {
+ let store = SidebarThreadMetadataStore::global(cx);
+ store.update(cx, |store, cx| {
+ store.delete(acp::SessionId::new("session-2"), cx).detach();
+ });
+ });
+
+ cx.run_until_parked();
+
+ cx.update(|cx| {
+ let store = SidebarThreadMetadataStore::global(cx);
+ let store = store.read(cx);
+
+ let entry_ids = store
+ .entry_ids()
+ .map(|session_id| session_id.0.to_string())
+ .collect::<Vec<_>>();
+ assert_eq!(entry_ids, vec!["session-1"]);
+
+ let second_path_entries = store
+ .entries_for_path(&second_paths)
+ .map(|entry| entry.session_id.0.to_string())
+ .collect::<Vec<_>>();
+ assert_eq!(second_path_entries, vec!["session-1"]);
+ });
+ }
+
#[gpui::test]
async fn test_migrate_thread_metadata(cx: &mut TestAppContext) {
cx.update(|cx| {
ThreadStore::init_global(cx);
- ThreadMetadataStore::init_global(cx);
+ SidebarThreadMetadataStore::init_global(cx);
});
- // Verify the list is empty before migration
- let metadata_list = cx.update(|cx| {
- let store = ThreadMetadataStore::global(cx);
- store.read(cx).list(cx)
+ // Verify the cache is empty before migration
+ let list = cx.update(|cx| {
+ let store = SidebarThreadMetadataStore::global(cx);
+ store.read(cx).entries().collect::<Vec<_>>()
});
-
- let list = metadata_list.await.unwrap();
assert_eq!(list.len(), 0);
+ let project_a_paths = PathList::new(&[Path::new("/project-a")]);
+ let project_b_paths = PathList::new(&[Path::new("/project-b")]);
let now = Utc::now();
- // Populate the native ThreadStore via save_thread
- let save1 = cx.update(|cx| {
- let thread_store = ThreadStore::global(cx);
- thread_store.update(cx, |store, cx| {
- store.save_thread(
- acp::SessionId::new("session-1"),
- make_db_thread("Thread 1", now),
- PathList::default(),
- cx,
- )
- })
- });
- save1.await.unwrap();
- cx.run_until_parked();
+ for index in 0..12 {
+ let updated_at = now + chrono::Duration::seconds(index as i64);
+ let session_id = format!("project-a-session-{index}");
+ let title = format!("Project A Thread {index}");
+
+ let save_task = cx.update(|cx| {
+ let thread_store = ThreadStore::global(cx);
+ let session_id = session_id.clone();
+ let title = title.clone();
+ let project_a_paths = project_a_paths.clone();
+ thread_store.update(cx, |store, cx| {
+ store.save_thread(
+ acp::SessionId::new(session_id),
+ make_db_thread(&title, updated_at),
+ project_a_paths,
+ cx,
+ )
+ })
+ });
+ save_task.await.unwrap();
+ cx.run_until_parked();
+ }
- let save2 = cx.update(|cx| {
+ for index in 0..3 {
+ let updated_at = now + chrono::Duration::seconds(100 + index as i64);
+ let session_id = format!("project-b-session-{index}");
+ let title = format!("Project B Thread {index}");
+
+ let save_task = cx.update(|cx| {
+ let thread_store = ThreadStore::global(cx);
+ let session_id = session_id.clone();
+ let title = title.clone();
+ let project_b_paths = project_b_paths.clone();
+ thread_store.update(cx, |store, cx| {
+ store.save_thread(
+ acp::SessionId::new(session_id),
+ make_db_thread(&title, updated_at),
+ project_b_paths,
+ cx,
+ )
+ })
+ });
+ save_task.await.unwrap();
+ cx.run_until_parked();
+ }
+
+ let save_projectless = cx.update(|cx| {
let thread_store = ThreadStore::global(cx);
thread_store.update(cx, |store, cx| {
store.save_thread(
- acp::SessionId::new("session-2"),
- make_db_thread("Thread 2", now),
+ acp::SessionId::new("projectless-session"),
+ make_db_thread("Projectless Thread", now + chrono::Duration::seconds(200)),
PathList::default(),
cx,
)
})
});
- save2.await.unwrap();
+ save_projectless.await.unwrap();
cx.run_until_parked();
// Run migration
@@ -462,35 +820,80 @@ mod tests {
cx.run_until_parked();
- // Verify the metadata was migrated
- let metadata_list = cx.update(|cx| {
- let store = ThreadMetadataStore::global(cx);
- store.read(cx).list(cx)
+ // Verify the metadata was migrated, limited to 10 per project, and
+ // projectless threads were skipped.
+ let list = cx.update(|cx| {
+ let store = SidebarThreadMetadataStore::global(cx);
+ store.read(cx).entries().collect::<Vec<_>>()
});
+ assert_eq!(list.len(), 13);
- let list = metadata_list.await.unwrap();
- assert_eq!(list.len(), 2);
+ assert!(
+ list.iter()
+ .all(|metadata| !metadata.folder_paths.is_empty())
+ );
+ assert!(
+ list.iter()
+ .all(|metadata| metadata.session_id.0.as_ref() != "projectless-session")
+ );
- let metadata1 = list
+ let project_a_entries = list
.iter()
- .find(|m| m.session_id.0.as_ref() == "session-1")
- .expect("session-1 should be in migrated metadata");
- assert_eq!(metadata1.title.as_ref(), "Thread 1");
- assert!(metadata1.agent_id.is_none());
+ .filter(|metadata| metadata.folder_paths == project_a_paths)
+ .collect::<Vec<_>>();
+ assert_eq!(project_a_entries.len(), 10);
+ assert_eq!(
+ project_a_entries
+ .iter()
+ .map(|metadata| metadata.session_id.0.as_ref())
+ .collect::<Vec<_>>(),
+ vec![
+ "project-a-session-11",
+ "project-a-session-10",
+ "project-a-session-9",
+ "project-a-session-8",
+ "project-a-session-7",
+ "project-a-session-6",
+ "project-a-session-5",
+ "project-a-session-4",
+ "project-a-session-3",
+ "project-a-session-2",
+ ]
+ );
+ assert!(
+ project_a_entries
+ .iter()
+ .all(|metadata| metadata.agent_id.is_none())
+ );
- let metadata2 = list
+ let project_b_entries = list
.iter()
- .find(|m| m.session_id.0.as_ref() == "session-2")
- .expect("session-2 should be in migrated metadata");
- assert_eq!(metadata2.title.as_ref(), "Thread 2");
- assert!(metadata2.agent_id.is_none());
+ .filter(|metadata| metadata.folder_paths == project_b_paths)
+ .collect::<Vec<_>>();
+ assert_eq!(project_b_entries.len(), 3);
+ assert_eq!(
+ project_b_entries
+ .iter()
+ .map(|metadata| metadata.session_id.0.as_ref())
+ .collect::<Vec<_>>(),
+ vec![
+ "project-b-session-2",
+ "project-b-session-1",
+ "project-b-session-0",
+ ]
+ );
+ assert!(
+ project_b_entries
+ .iter()
+ .all(|metadata| metadata.agent_id.is_none())
+ );
}
#[gpui::test]
async fn test_migrate_thread_metadata_skips_when_data_exists(cx: &mut TestAppContext) {
cx.update(|cx| {
ThreadStore::init_global(cx);
- ThreadMetadataStore::init_global(cx);
+ SidebarThreadMetadataStore::init_global(cx);
});
// Pre-populate the metadata store with existing data
@@ -504,7 +907,7 @@ mod tests {
};
cx.update(|cx| {
- let store = ThreadMetadataStore::global(cx);
+ let store = SidebarThreadMetadataStore::global(cx);
store.update(cx, |store, cx| {
store.save(existing_metadata, cx).detach();
});
@@ -535,12 +938,10 @@ mod tests {
cx.run_until_parked();
// Verify only the existing metadata is present (migration was skipped)
- let metadata_list = cx.update(|cx| {
- let store = ThreadMetadataStore::global(cx);
- store.read(cx).list(cx)
+ let list = cx.update(|cx| {
+ let store = SidebarThreadMetadataStore::global(cx);
+ store.read(cx).entries().collect::<Vec<_>>()
});
-
- let list = metadata_list.await.unwrap();
assert_eq!(list.len(), 1);
assert_eq!(list[0].session_id.0.as_ref(), "existing-session");
}
@@ -552,7 +953,7 @@ mod tests {
cx.set_global(settings_store);
cx.update_flags(true, vec!["agent-v2".to_string()]);
ThreadStore::init_global(cx);
- ThreadMetadataStore::init_global(cx);
+ SidebarThreadMetadataStore::init_global(cx);
});
let fs = FakeFs::new(cx.executor());
@@ -608,14 +1009,12 @@ mod tests {
});
cx.run_until_parked();
- // List all metadata from the store.
- let metadata_list = cx.update(|cx| {
- let store = ThreadMetadataStore::global(cx);
- store.read(cx).list(cx)
+ // List all metadata from the store cache.
+ let list = cx.update(|cx| {
+ let store = SidebarThreadMetadataStore::global(cx);
+ store.read(cx).entries().collect::<Vec<_>>()
});
- let list = metadata_list.await.unwrap();
-
// The subagent thread should NOT appear in the sidebar metadata.
// Only the regular thread should be listed.
assert_eq!(
@@ -7,7 +7,7 @@ use crate::{
use acp_thread::AgentSessionInfo;
use agent::ThreadStore;
use agent_client_protocol as acp;
-use chrono::{Datelike as _, Local, NaiveDate, TimeDelta, Utc};
+use chrono::{DateTime, Datelike as _, Local, NaiveDate, TimeDelta, Utc};
use editor::Editor;
use fs::Fs;
use gpui::{
@@ -19,11 +19,12 @@ use menu::{Confirm, SelectFirst, SelectLast, SelectNext, SelectPrevious};
use project::{AgentId, AgentServerStore};
use theme::ActiveTheme;
use ui::{
- ButtonLike, CommonAnimationExt, ContextMenu, ContextMenuEntry, HighlightedLabel, ListItem,
- PopoverMenu, PopoverMenuHandle, Tab, TintColor, Tooltip, WithScrollbar, prelude::*,
+ ButtonLike, CommonAnimationExt, ContextMenu, ContextMenuEntry, Divider, HighlightedLabel,
+ KeyBinding, PopoverMenu, PopoverMenuHandle, TintColor, Tooltip, WithScrollbar, prelude::*,
utils::platform_title_bar_height,
};
use util::ResultExt as _;
+use zed_actions::agents_sidebar::FocusSidebarFilter;
use zed_actions::editor::{MoveDown, MoveUp};
#[derive(Clone)]
@@ -110,7 +111,7 @@ fn archive_empty_state_message(
pub enum ThreadsArchiveViewEvent {
Close,
- OpenThread {
+ Unarchive {
agent: Agent,
session_info: AgentSessionInfo,
},
@@ -162,6 +163,25 @@ impl ThreadsArchiveView {
}
});
+ let filter_focus_handle = filter_editor.read(cx).focus_handle(cx);
+ cx.on_focus_in(
+ &filter_focus_handle,
+ window,
+ |this: &mut Self, _window, cx| {
+ if this.selection.is_some() {
+ this.selection = None;
+ cx.notify();
+ }
+ },
+ )
+ .detach();
+
+ cx.on_focus_out(&focus_handle, window, |this: &mut Self, _, _window, cx| {
+ this.selection = None;
+ cx.notify();
+ })
+ .detach();
+
let mut this = Self {
agent_connection_store,
agent_server_store,
@@ -185,6 +205,19 @@ impl ThreadsArchiveView {
this
}
+ pub fn has_selection(&self) -> bool {
+ self.selection.is_some()
+ }
+
+ pub fn clear_selection(&mut self) {
+ self.selection = None;
+ }
+
+ pub fn focus_filter_editor(&self, window: &mut Window, cx: &mut App) {
+ let handle = self.filter_editor.read(cx).focus_handle(cx);
+ handle.focus(window, cx);
+ }
+
fn set_selected_agent(&mut self, agent: Agent, window: &mut Window, cx: &mut Context<Self>) {
self.selected_agent = agent.clone();
self.is_loading = true;
@@ -276,6 +309,8 @@ impl ThreadsArchiveView {
self.list_state.reset(items.len());
self.items = items;
+ self.selection = None;
+ self.hovered_index = None;
cx.notify();
}
@@ -285,12 +320,7 @@ impl ThreadsArchiveView {
});
}
- fn go_back(&mut self, window: &mut Window, cx: &mut Context<Self>) {
- self.reset_filter_editor_text(window, cx);
- cx.emit(ThreadsArchiveViewEvent::Close);
- }
-
- fn open_thread(
+ fn unarchive_thread(
&mut self,
session_info: AgentSessionInfo,
window: &mut Window,
@@ -298,7 +328,7 @@ impl ThreadsArchiveView {
) {
self.selection = None;
self.reset_filter_editor_text(window, cx);
- cx.emit(ThreadsArchiveViewEvent::OpenThread {
+ cx.emit(ThreadsArchiveViewEvent::Unarchive {
agent: self.selected_agent.clone(),
session_info,
});
@@ -349,10 +379,16 @@ impl ThreadsArchiveView {
fn editor_move_down(&mut self, _: &MoveDown, window: &mut Window, cx: &mut Context<Self>) {
self.select_next(&SelectNext, window, cx);
+ if self.selection.is_some() {
+ self.focus_handle.focus(window, cx);
+ }
}
fn editor_move_up(&mut self, _: &MoveUp, window: &mut Window, cx: &mut Context<Self>) {
self.select_previous(&SelectPrevious, window, cx);
+ if self.selection.is_some() {
+ self.focus_handle.focus(window, cx);
+ }
}
fn select_next(&mut self, _: &SelectNext, _window: &mut Window, cx: &mut Context<Self>) {
@@ -367,24 +403,29 @@ impl ThreadsArchiveView {
}
}
- fn select_previous(
- &mut self,
- _: &SelectPrevious,
- _window: &mut Window,
- cx: &mut Context<Self>,
- ) {
- let prev = match self.selection {
- Some(ix) if ix > 0 => self.find_previous_selectable(ix - 1),
+ fn select_previous(&mut self, _: &SelectPrevious, window: &mut Window, cx: &mut Context<Self>) {
+ match self.selection {
+ Some(ix) => {
+ if let Some(prev) = (ix > 0)
+ .then(|| self.find_previous_selectable(ix - 1))
+ .flatten()
+ {
+ self.selection = Some(prev);
+ self.list_state.scroll_to_reveal_item(prev);
+ } else {
+ self.selection = None;
+ self.focus_filter_editor(window, cx);
+ }
+ cx.notify();
+ }
None => {
let last = self.items.len().saturating_sub(1);
- self.find_previous_selectable(last)
+ if let Some(prev) = self.find_previous_selectable(last) {
+ self.selection = Some(prev);
+ self.list_state.scroll_to_reveal_item(prev);
+ cx.notify();
+ }
}
- _ => return,
- };
- if let Some(prev) = prev {
- self.selection = Some(prev);
- self.list_state.scroll_to_reveal_item(prev);
- cx.notify();
}
}
@@ -410,7 +451,13 @@ impl ThreadsArchiveView {
let Some(ArchiveListItem::Entry { session, .. }) = self.items.get(ix) else {
return;
};
- self.open_thread(session.clone(), window, cx);
+
+ let can_unarchive = session.work_dirs.as_ref().is_some_and(|p| !p.is_empty());
+ if !can_unarchive {
+ return;
+ }
+
+ self.unarchive_thread(session.clone(), window, cx);
}
fn render_list_entry(
@@ -426,7 +473,7 @@ impl ThreadsArchiveView {
match item {
ArchiveListItem::BucketSeparator(bucket) => div()
.w_full()
- .px_2()
+ .px_2p5()
.pt_3()
.pb_1()
.child(
@@ -439,75 +486,68 @@ impl ThreadsArchiveView {
session,
highlight_positions,
} => {
- let is_selected = self.selection == Some(ix);
+ let id = SharedString::from(format!("archive-entry-{}", ix));
+
+ let is_focused = self.selection == Some(ix);
let hovered = self.hovered_index == Some(ix);
+
+ let project_names = session.work_dirs.as_ref().and_then(|paths| {
+ let paths_str = paths
+ .paths()
+ .iter()
+ .filter_map(|p| p.file_name())
+ .filter_map(|name| name.to_str())
+ .join(", ");
+ if paths_str.is_empty() {
+ None
+ } else {
+ Some(paths_str)
+ }
+ });
+
+ let can_unarchive = session.work_dirs.as_ref().is_some_and(|p| !p.is_empty());
+
let supports_delete = self
.history
.as_ref()
.map(|h| h.read(cx).supports_delete())
.unwrap_or(false);
+
let title: SharedString =
session.title.clone().unwrap_or_else(|| "Untitled".into());
+
let session_info = session.clone();
let session_id_for_delete = session.session_id.clone();
let focus_handle = self.focus_handle.clone();
- let highlight_positions = highlight_positions.clone();
-
- let timestamp = session.created_at.or(session.updated_at).map(|entry_time| {
- let now = Utc::now();
- let duration = now.signed_duration_since(entry_time);
-
- let minutes = duration.num_minutes();
- let hours = duration.num_hours();
- let days = duration.num_days();
- let weeks = days / 7;
- let months = days / 30;
-
- if minutes < 60 {
- format!("{}m", minutes.max(1))
- } else if hours < 24 {
- format!("{}h", hours)
- } else if weeks < 4 {
- format!("{}w", weeks.max(1))
- } else {
- format!("{}mo", months.max(1))
- }
- });
- let id = SharedString::from(format!("archive-entry-{}", ix));
+ let timestamp = session
+ .created_at
+ .or(session.updated_at)
+ .map(format_history_entry_timestamp);
+ let highlight_positions = highlight_positions.clone();
let title_label = if highlight_positions.is_empty() {
- Label::new(title)
- .size(LabelSize::Small)
- .truncate()
- .into_any_element()
+ Label::new(title).truncate().flex_1().into_any_element()
} else {
HighlightedLabel::new(title, highlight_positions)
- .size(LabelSize::Small)
.truncate()
+ .flex_1()
.into_any_element()
};
- ListItem::new(id)
- .toggle_state(is_selected)
- .child(
- h_flex()
- .min_w_0()
- .w_full()
- .py_1()
- .pl_0p5()
- .pr_1p5()
- .gap_2()
- .justify_between()
- .child(title_label)
- .when(!(hovered && supports_delete), |this| {
- this.when_some(timestamp, |this, ts| {
- this.child(
- Label::new(ts).size(LabelSize::Small).color(Color::Muted),
- )
- })
- }),
- )
+ h_flex()
+ .id(id)
+ .min_w_0()
+ .w_full()
+ .px(DynamicSpacing::Base06.rems(cx))
+ .border_1()
+ .map(|this| {
+ if is_focused {
+ this.border_color(cx.theme().colors().border_focused)
+ } else {
+ this.border_color(gpui::transparent_black())
+ }
+ })
.on_hover(cx.listener(move |this, is_hovered, _window, cx| {
if *is_hovered {
this.hovered_index = Some(ix);
@@ -516,32 +556,108 @@ impl ThreadsArchiveView {
}
cx.notify();
}))
- .end_slot::<IconButton>(if hovered && supports_delete {
- Some(
- IconButton::new("delete-thread", IconName::Trash)
- .icon_size(IconSize::Small)
- .icon_color(Color::Muted)
- .tooltip({
- move |_window, cx| {
- Tooltip::for_action_in(
- "Delete Thread",
- &RemoveSelectedThread,
- &focus_handle,
- cx,
+ .child(
+ v_flex()
+ .min_w_0()
+ .w_full()
+ .p_1()
+ .child(
+ h_flex()
+ .min_w_0()
+ .w_full()
+ .gap_1()
+ .justify_between()
+ .child(title_label)
+ .when(hovered || is_focused, |this| {
+ this.child(
+ h_flex()
+ .gap_0p5()
+ .when(can_unarchive, |this| {
+ this.child(
+ Button::new("unarchive-thread", "Restore")
+ .style(ButtonStyle::Filled)
+ .label_size(LabelSize::Small)
+ .when(is_focused, |this| {
+ this.key_binding(
+ KeyBinding::for_action_in(
+ &menu::Confirm,
+ &focus_handle,
+ cx,
+ )
+ .map(|kb| {
+ kb.size(rems_from_px(12.))
+ }),
+ )
+ })
+ .on_click(cx.listener(
+ move |this, _, window, cx| {
+ this.unarchive_thread(
+ session_info.clone(),
+ window,
+ cx,
+ );
+ },
+ )),
+ )
+ })
+ .when(supports_delete, |this| {
+ this.child(
+ IconButton::new(
+ "delete-thread",
+ IconName::Trash,
+ )
+ .style(ButtonStyle::Filled)
+ .icon_size(IconSize::Small)
+ .icon_color(Color::Muted)
+ .tooltip({
+ move |_window, cx| {
+ Tooltip::for_action_in(
+ "Delete Thread",
+ &RemoveSelectedThread,
+ &focus_handle,
+ cx,
+ )
+ }
+ })
+ .on_click(cx.listener(
+ move |this, _, _, cx| {
+ this.delete_thread(
+ &session_id_for_delete,
+ cx,
+ );
+ cx.stop_propagation();
+ },
+ )),
+ )
+ }),
)
- }
- })
- .on_click(cx.listener(move |this, _, _, cx| {
- this.delete_thread(&session_id_for_delete, cx);
- cx.stop_propagation();
- })),
- )
- } else {
- None
- })
- .on_click(cx.listener(move |this, _, window, cx| {
- this.open_thread(session_info.clone(), window, cx);
- }))
+ }),
+ )
+ .child(
+ h_flex()
+ .gap_1()
+ .when_some(timestamp, |this, ts| {
+ this.child(
+ Label::new(ts)
+ .size(LabelSize::Small)
+ .color(Color::Muted),
+ )
+ })
+ .when_some(project_names, |this, project| {
+ this.child(
+ Label::new("•")
+ .size(LabelSize::Small)
+ .color(Color::Muted)
+ .alpha(0.5),
+ )
+ .child(
+ Label::new(project)
+ .size(LabelSize::Small)
+ .color(Color::Muted),
+ )
+ }),
+ ),
+ )
.into_any_element()
}
}
@@ -681,61 +797,75 @@ impl ThreadsArchiveView {
let has_query = !self.filter_editor.read(cx).text(cx).is_empty();
let traffic_lights = cfg!(target_os = "macos") && !window.is_fullscreen();
let header_height = platform_title_bar_height(window);
-
- v_flex()
- .child(
- h_flex()
- .h(header_height)
- .mt_px()
- .pb_px()
- .when(traffic_lights, |this| {
- this.pl(px(ui::utils::TRAFFIC_LIGHT_PADDING))
- })
- .pr_1p5()
- .border_b_1()
- .border_color(cx.theme().colors().border)
- .justify_between()
- .child(
- h_flex()
- .gap_1p5()
- .child(
- IconButton::new("back", IconName::ArrowLeft)
- .icon_size(IconSize::Small)
- .tooltip(Tooltip::text("Back to Sidebar"))
- .on_click(cx.listener(|this, _, window, cx| {
- this.go_back(window, cx);
- })),
- )
- .child(Label::new("Threads Archive").size(LabelSize::Small).mb_px()),
- )
- .child(self.render_agent_picker(cx)),
- )
+ let show_focus_keybinding =
+ self.selection.is_some() && !self.filter_editor.focus_handle(cx).is_focused(window);
+
+ h_flex()
+ .h(header_height)
+ .mt_px()
+ .pb_px()
+ .when(traffic_lights, |this| {
+ this.pl(px(ui::utils::TRAFFIC_LIGHT_PADDING))
+ })
+ .pr_1p5()
+ .gap_1()
+ .justify_between()
+ .border_b_1()
+ .border_color(cx.theme().colors().border)
+ .child(Divider::vertical().color(ui::DividerColor::Border))
.child(
h_flex()
- .h(Tab::container_height(cx))
- .p_2()
- .pr_1p5()
- .gap_1p5()
- .border_b_1()
- .border_color(cx.theme().colors().border)
+ .ml_1()
+ .min_w_0()
+ .w_full()
+ .gap_1()
.child(
Icon::new(IconName::MagnifyingGlass)
.size(IconSize::Small)
.color(Color::Muted),
)
- .child(self.filter_editor.clone())
- .when(has_query, |this| {
- this.child(
- IconButton::new("clear_filter", IconName::Close)
- .icon_size(IconSize::Small)
- .tooltip(Tooltip::text("Clear Search"))
- .on_click(cx.listener(|this, _, window, cx| {
- this.reset_filter_editor_text(window, cx);
- this.update_items(cx);
- })),
- )
- }),
+ .child(self.filter_editor.clone()),
)
+ .when(show_focus_keybinding, |this| {
+ this.child(KeyBinding::for_action(&FocusSidebarFilter, cx))
+ })
+ .when(!has_query && !show_focus_keybinding, |this| {
+ this.child(self.render_agent_picker(cx))
+ })
+ .when(has_query, |this| {
+ this.child(
+ IconButton::new("clear_filter", IconName::Close)
+ .icon_size(IconSize::Small)
+ .tooltip(Tooltip::text("Clear Search"))
+ .on_click(cx.listener(|this, _, window, cx| {
+ this.reset_filter_editor_text(window, cx);
+ this.update_items(cx);
+ })),
+ )
+ })
+ }
+}
+
+pub fn format_history_entry_timestamp(entry_time: DateTime<Utc>) -> String {
+ let now = Utc::now();
+ let duration = now.signed_duration_since(entry_time);
+
+ let minutes = duration.num_minutes();
+ let hours = duration.num_hours();
+ let days = duration.num_days();
+ let weeks = days / 7;
+ let months = days / 30;
+
+ if minutes < 60 {
+ format!("{}m", minutes.max(1))
+ } else if hours < 24 {
+ format!("{}h", hours.max(1))
+ } else if days < 7 {
+ format!("{}d", days.max(1))
+ } else if weeks < 4 {
+ format!("{}w", weeks.max(1))
+ } else {
+ format!("{}mo", months.max(1))
}
}
@@ -23,7 +23,6 @@ http_client.workspace = true
schemars = { workspace = true, optional = true }
serde.workspace = true
serde_json.workspace = true
-settings.workspace = true
strum.workspace = true
thiserror.workspace = true
@@ -8,7 +8,6 @@ use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::B
use http_client::http::{self, HeaderMap, HeaderValue};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest, StatusCode};
use serde::{Deserialize, Serialize};
-pub use settings::{AnthropicAvailableModel as AvailableModel, ModelMode};
use strum::{EnumIter, EnumString};
use thiserror::Error;
@@ -34,110 +33,84 @@ pub enum AnthropicModelMode {
Thinking {
budget_tokens: Option<u32>,
},
-}
-
-impl From<ModelMode> for AnthropicModelMode {
- fn from(value: ModelMode) -> Self {
- match value {
- ModelMode::Default => AnthropicModelMode::Default,
- ModelMode::Thinking { budget_tokens } => AnthropicModelMode::Thinking { budget_tokens },
- }
- }
-}
-
-impl From<AnthropicModelMode> for ModelMode {
- fn from(value: AnthropicModelMode) -> Self {
- match value {
- AnthropicModelMode::Default => ModelMode::Default,
- AnthropicModelMode::Thinking { budget_tokens } => ModelMode::Thinking { budget_tokens },
- }
- }
+ AdaptiveThinking,
}
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
pub enum Model {
- #[serde(rename = "claude-opus-4", alias = "claude-opus-4-latest")]
- ClaudeOpus4,
- #[serde(rename = "claude-opus-4-1", alias = "claude-opus-4-1-latest")]
- ClaudeOpus4_1,
#[serde(
- rename = "claude-opus-4-thinking",
+ rename = "claude-opus-4",
+ alias = "claude-opus-4-latest",
+ alias = "claude-opus-4-thinking",
alias = "claude-opus-4-thinking-latest"
)]
- ClaudeOpus4Thinking,
+ ClaudeOpus4,
#[serde(
- rename = "claude-opus-4-1-thinking",
+ rename = "claude-opus-4-1",
+ alias = "claude-opus-4-1-latest",
+ alias = "claude-opus-4-1-thinking",
alias = "claude-opus-4-1-thinking-latest"
)]
- ClaudeOpus4_1Thinking,
- #[serde(rename = "claude-opus-4-5", alias = "claude-opus-4-5-latest")]
- ClaudeOpus4_5,
+ ClaudeOpus4_1,
#[serde(
- rename = "claude-opus-4-5-thinking",
+ rename = "claude-opus-4-5",
+ alias = "claude-opus-4-5-latest",
+ alias = "claude-opus-4-5-thinking",
alias = "claude-opus-4-5-thinking-latest"
)]
- ClaudeOpus4_5Thinking,
+ ClaudeOpus4_5,
#[serde(
rename = "claude-opus-4-6",
alias = "claude-opus-4-6-latest",
alias = "claude-opus-4-6-1m-context",
- alias = "claude-opus-4-6-1m-context-latest"
- )]
- ClaudeOpus4_6,
- #[serde(
- rename = "claude-opus-4-6-thinking",
+ alias = "claude-opus-4-6-1m-context-latest",
+ alias = "claude-opus-4-6-thinking",
alias = "claude-opus-4-6-thinking-latest",
alias = "claude-opus-4-6-1m-context-thinking",
alias = "claude-opus-4-6-1m-context-thinking-latest"
)]
- ClaudeOpus4_6Thinking,
- #[serde(rename = "claude-sonnet-4", alias = "claude-sonnet-4-latest")]
- ClaudeSonnet4,
+ ClaudeOpus4_6,
#[serde(
- rename = "claude-sonnet-4-thinking",
+ rename = "claude-sonnet-4",
+ alias = "claude-sonnet-4-latest",
+ alias = "claude-sonnet-4-thinking",
alias = "claude-sonnet-4-thinking-latest"
)]
- ClaudeSonnet4Thinking,
- #[serde(rename = "claude-sonnet-4-5", alias = "claude-sonnet-4-5-latest")]
- ClaudeSonnet4_5,
+ ClaudeSonnet4,
#[serde(
- rename = "claude-sonnet-4-5-thinking",
+ rename = "claude-sonnet-4-5",
+ alias = "claude-sonnet-4-5-latest",
+ alias = "claude-sonnet-4-5-thinking",
alias = "claude-sonnet-4-5-thinking-latest"
)]
- ClaudeSonnet4_5Thinking,
+ ClaudeSonnet4_5,
#[serde(
rename = "claude-sonnet-4-5-1m-context",
- alias = "claude-sonnet-4-5-1m-context-latest"
- )]
- ClaudeSonnet4_5_1mContext,
- #[serde(
- rename = "claude-sonnet-4-5-1m-context-thinking",
+ alias = "claude-sonnet-4-5-1m-context-latest",
+ alias = "claude-sonnet-4-5-1m-context-thinking",
alias = "claude-sonnet-4-5-1m-context-thinking-latest"
)]
- ClaudeSonnet4_5_1mContextThinking,
+ ClaudeSonnet4_5_1mContext,
#[default]
#[serde(
rename = "claude-sonnet-4-6",
alias = "claude-sonnet-4-6-latest",
alias = "claude-sonnet-4-6-1m-context",
- alias = "claude-sonnet-4-6-1m-context-latest"
- )]
- ClaudeSonnet4_6,
- #[serde(
- rename = "claude-sonnet-4-6-thinking",
+ alias = "claude-sonnet-4-6-1m-context-latest",
+ alias = "claude-sonnet-4-6-thinking",
alias = "claude-sonnet-4-6-thinking-latest",
alias = "claude-sonnet-4-6-1m-context-thinking",
alias = "claude-sonnet-4-6-1m-context-thinking-latest"
)]
- ClaudeSonnet4_6Thinking,
- #[serde(rename = "claude-haiku-4-5", alias = "claude-haiku-4-5-latest")]
- ClaudeHaiku4_5,
+ ClaudeSonnet4_6,
#[serde(
- rename = "claude-haiku-4-5-thinking",
+ rename = "claude-haiku-4-5",
+ alias = "claude-haiku-4-5-latest",
+ alias = "claude-haiku-4-5-thinking",
alias = "claude-haiku-4-5-thinking-latest"
)]
- ClaudeHaiku4_5Thinking,
+ ClaudeHaiku4_5,
#[serde(rename = "claude-3-haiku", alias = "claude-3-haiku-latest")]
Claude3Haiku,
#[serde(rename = "custom")]
@@ -165,38 +138,14 @@ impl Model {
}
pub fn from_id(id: &str) -> Result<Self> {
- if id.starts_with("claude-opus-4-6-1m-context-thinking") {
- return Ok(Self::ClaudeOpus4_6Thinking);
- }
-
- if id.starts_with("claude-opus-4-6-1m-context") {
- return Ok(Self::ClaudeOpus4_6);
- }
-
- if id.starts_with("claude-opus-4-6-thinking") {
- return Ok(Self::ClaudeOpus4_6Thinking);
- }
-
if id.starts_with("claude-opus-4-6") {
return Ok(Self::ClaudeOpus4_6);
}
- if id.starts_with("claude-opus-4-5-thinking") {
- return Ok(Self::ClaudeOpus4_5Thinking);
- }
-
if id.starts_with("claude-opus-4-5") {
return Ok(Self::ClaudeOpus4_5);
}
- if id.starts_with("claude-opus-4-1-thinking") {
- return Ok(Self::ClaudeOpus4_1Thinking);
- }
-
- if id.starts_with("claude-opus-4-thinking") {
- return Ok(Self::ClaudeOpus4Thinking);
- }
-
if id.starts_with("claude-opus-4-1") {
return Ok(Self::ClaudeOpus4_1);
}
@@ -205,50 +154,22 @@ impl Model {
return Ok(Self::ClaudeOpus4);
}
- if id.starts_with("claude-sonnet-4-6-1m-context-thinking") {
- return Ok(Self::ClaudeSonnet4_6Thinking);
- }
-
- if id.starts_with("claude-sonnet-4-6-1m-context") {
- return Ok(Self::ClaudeSonnet4_6);
- }
-
- if id.starts_with("claude-sonnet-4-6-thinking") {
- return Ok(Self::ClaudeSonnet4_6Thinking);
- }
-
if id.starts_with("claude-sonnet-4-6") {
return Ok(Self::ClaudeSonnet4_6);
}
- if id.starts_with("claude-sonnet-4-5-1m-context-thinking") {
- return Ok(Self::ClaudeSonnet4_5_1mContextThinking);
- }
-
if id.starts_with("claude-sonnet-4-5-1m-context") {
return Ok(Self::ClaudeSonnet4_5_1mContext);
}
- if id.starts_with("claude-sonnet-4-5-thinking") {
- return Ok(Self::ClaudeSonnet4_5Thinking);
- }
-
if id.starts_with("claude-sonnet-4-5") {
return Ok(Self::ClaudeSonnet4_5);
}
- if id.starts_with("claude-sonnet-4-thinking") {
- return Ok(Self::ClaudeSonnet4Thinking);
- }
-
if id.starts_with("claude-sonnet-4") {
return Ok(Self::ClaudeSonnet4);
}
- if id.starts_with("claude-haiku-4-5-thinking") {
- return Ok(Self::ClaudeHaiku4_5Thinking);
- }
-
if id.starts_with("claude-haiku-4-5") {
return Ok(Self::ClaudeHaiku4_5);
}
@@ -264,24 +185,13 @@ impl Model {
match self {
Self::ClaudeOpus4 => "claude-opus-4-latest",
Self::ClaudeOpus4_1 => "claude-opus-4-1-latest",
- Self::ClaudeOpus4Thinking => "claude-opus-4-thinking-latest",
- Self::ClaudeOpus4_1Thinking => "claude-opus-4-1-thinking-latest",
Self::ClaudeOpus4_5 => "claude-opus-4-5-latest",
- Self::ClaudeOpus4_5Thinking => "claude-opus-4-5-thinking-latest",
Self::ClaudeOpus4_6 => "claude-opus-4-6-latest",
- Self::ClaudeOpus4_6Thinking => "claude-opus-4-6-thinking-latest",
Self::ClaudeSonnet4 => "claude-sonnet-4-latest",
- Self::ClaudeSonnet4Thinking => "claude-sonnet-4-thinking-latest",
Self::ClaudeSonnet4_5 => "claude-sonnet-4-5-latest",
- Self::ClaudeSonnet4_5Thinking => "claude-sonnet-4-5-thinking-latest",
Self::ClaudeSonnet4_5_1mContext => "claude-sonnet-4-5-1m-context-latest",
- Self::ClaudeSonnet4_5_1mContextThinking => {
- "claude-sonnet-4-5-1m-context-thinking-latest"
- }
Self::ClaudeSonnet4_6 => "claude-sonnet-4-6-latest",
- Self::ClaudeSonnet4_6Thinking => "claude-sonnet-4-6-thinking-latest",
Self::ClaudeHaiku4_5 => "claude-haiku-4-5-latest",
- Self::ClaudeHaiku4_5Thinking => "claude-haiku-4-5-thinking-latest",
Self::Claude3Haiku => "claude-3-haiku-20240307",
Self::Custom { name, .. } => name,
}
@@ -290,17 +200,14 @@ impl Model {
/// The id of the model that should be used for making API requests
pub fn request_id(&self) -> &str {
match self {
- Self::ClaudeOpus4 | Self::ClaudeOpus4Thinking => "claude-opus-4-20250514",
- Self::ClaudeOpus4_1 | Self::ClaudeOpus4_1Thinking => "claude-opus-4-1-20250805",
- Self::ClaudeOpus4_5 | Self::ClaudeOpus4_5Thinking => "claude-opus-4-5-20251101",
- Self::ClaudeOpus4_6 | Self::ClaudeOpus4_6Thinking => "claude-opus-4-6",
- Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking => "claude-sonnet-4-20250514",
- Self::ClaudeSonnet4_5
- | Self::ClaudeSonnet4_5Thinking
- | Self::ClaudeSonnet4_5_1mContext
- | Self::ClaudeSonnet4_5_1mContextThinking => "claude-sonnet-4-5-20250929",
- Self::ClaudeSonnet4_6 | Self::ClaudeSonnet4_6Thinking => "claude-sonnet-4-6",
- Self::ClaudeHaiku4_5 | Self::ClaudeHaiku4_5Thinking => "claude-haiku-4-5-20251001",
+ Self::ClaudeOpus4 => "claude-opus-4-20250514",
+ Self::ClaudeOpus4_1 => "claude-opus-4-1-20250805",
+ Self::ClaudeOpus4_5 => "claude-opus-4-5-20251101",
+ Self::ClaudeOpus4_6 => "claude-opus-4-6",
+ Self::ClaudeSonnet4 => "claude-sonnet-4-20250514",
+ Self::ClaudeSonnet4_5 | Self::ClaudeSonnet4_5_1mContext => "claude-sonnet-4-5-20250929",
+ Self::ClaudeSonnet4_6 => "claude-sonnet-4-6",
+ Self::ClaudeHaiku4_5 => "claude-haiku-4-5-20251001",
Self::Claude3Haiku => "claude-3-haiku-20240307",
Self::Custom { name, .. } => name,
}
@@ -310,22 +217,13 @@ impl Model {
match self {
Self::ClaudeOpus4 => "Claude Opus 4",
Self::ClaudeOpus4_1 => "Claude Opus 4.1",
- Self::ClaudeOpus4Thinking => "Claude Opus 4 Thinking",
- Self::ClaudeOpus4_1Thinking => "Claude Opus 4.1 Thinking",
Self::ClaudeOpus4_5 => "Claude Opus 4.5",
- Self::ClaudeOpus4_5Thinking => "Claude Opus 4.5 Thinking",
Self::ClaudeOpus4_6 => "Claude Opus 4.6",
- Self::ClaudeOpus4_6Thinking => "Claude Opus 4.6 Thinking",
Self::ClaudeSonnet4 => "Claude Sonnet 4",
- Self::ClaudeSonnet4Thinking => "Claude Sonnet 4 Thinking",
Self::ClaudeSonnet4_5 => "Claude Sonnet 4.5",
- Self::ClaudeSonnet4_5Thinking => "Claude Sonnet 4.5 Thinking",
Self::ClaudeSonnet4_5_1mContext => "Claude Sonnet 4.5 (1M context)",
- Self::ClaudeSonnet4_5_1mContextThinking => "Claude Sonnet 4.5 Thinking (1M context)",
Self::ClaudeSonnet4_6 => "Claude Sonnet 4.6",
- Self::ClaudeSonnet4_6Thinking => "Claude Sonnet 4.6 Thinking",
Self::ClaudeHaiku4_5 => "Claude Haiku 4.5",
- Self::ClaudeHaiku4_5Thinking => "Claude Haiku 4.5 Thinking",
Self::Claude3Haiku => "Claude 3 Haiku",
Self::Custom {
name, display_name, ..
@@ -337,22 +235,13 @@ impl Model {
match self {
Self::ClaudeOpus4
| Self::ClaudeOpus4_1
- | Self::ClaudeOpus4Thinking
- | Self::ClaudeOpus4_1Thinking
| Self::ClaudeOpus4_5
- | Self::ClaudeOpus4_5Thinking
| Self::ClaudeOpus4_6
- | Self::ClaudeOpus4_6Thinking
| Self::ClaudeSonnet4
- | Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
- | Self::ClaudeSonnet4_5Thinking
| Self::ClaudeSonnet4_5_1mContext
- | Self::ClaudeSonnet4_5_1mContextThinking
| Self::ClaudeSonnet4_6
- | Self::ClaudeSonnet4_6Thinking
| Self::ClaudeHaiku4_5
- | Self::ClaudeHaiku4_5Thinking
| Self::Claude3Haiku => Some(AnthropicModelCacheConfiguration {
min_total_token: 2_048,
should_speculate: true,
@@ -369,46 +258,28 @@ impl Model {
match self {
Self::ClaudeOpus4
| Self::ClaudeOpus4_1
- | Self::ClaudeOpus4Thinking
- | Self::ClaudeOpus4_1Thinking
| Self::ClaudeOpus4_5
- | Self::ClaudeOpus4_5Thinking
| Self::ClaudeSonnet4
- | Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
- | Self::ClaudeSonnet4_5Thinking
| Self::ClaudeHaiku4_5
- | Self::ClaudeHaiku4_5Thinking
| Self::Claude3Haiku => 200_000,
- Self::ClaudeOpus4_6
- | Self::ClaudeOpus4_6Thinking
- | Self::ClaudeSonnet4_5_1mContext
- | Self::ClaudeSonnet4_5_1mContextThinking
- | Self::ClaudeSonnet4_6
- | Self::ClaudeSonnet4_6Thinking => 1_000_000,
+ Self::ClaudeOpus4_6 | Self::ClaudeSonnet4_5_1mContext | Self::ClaudeSonnet4_6 => {
+ 1_000_000
+ }
Self::Custom { max_tokens, .. } => *max_tokens,
}
}
pub fn max_output_tokens(&self) -> u64 {
match self {
- Self::ClaudeOpus4
- | Self::ClaudeOpus4Thinking
- | Self::ClaudeOpus4_1
- | Self::ClaudeOpus4_1Thinking => 32_000,
+ Self::ClaudeOpus4 | Self::ClaudeOpus4_1 => 32_000,
Self::ClaudeOpus4_5
- | Self::ClaudeOpus4_5Thinking
| Self::ClaudeSonnet4
- | Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
- | Self::ClaudeSonnet4_5Thinking
| Self::ClaudeSonnet4_5_1mContext
- | Self::ClaudeSonnet4_5_1mContextThinking
| Self::ClaudeSonnet4_6
- | Self::ClaudeSonnet4_6Thinking
- | Self::ClaudeHaiku4_5
- | Self::ClaudeHaiku4_5Thinking => 64_000,
- Self::ClaudeOpus4_6 | Self::ClaudeOpus4_6Thinking => 128_000,
+ | Self::ClaudeHaiku4_5 => 64_000,
+ Self::ClaudeOpus4_6 => 128_000,
Self::Claude3Haiku => 4_096,
Self::Custom {
max_output_tokens, ..
@@ -420,22 +291,13 @@ impl Model {
match self {
Self::ClaudeOpus4
| Self::ClaudeOpus4_1
- | Self::ClaudeOpus4Thinking
- | Self::ClaudeOpus4_1Thinking
| Self::ClaudeOpus4_5
- | Self::ClaudeOpus4_5Thinking
| Self::ClaudeOpus4_6
- | Self::ClaudeOpus4_6Thinking
| Self::ClaudeSonnet4
- | Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
- | Self::ClaudeSonnet4_5Thinking
| Self::ClaudeSonnet4_5_1mContext
- | Self::ClaudeSonnet4_5_1mContextThinking
| Self::ClaudeSonnet4_6
- | Self::ClaudeSonnet4_6Thinking
| Self::ClaudeHaiku4_5
- | Self::ClaudeHaiku4_5Thinking
| Self::Claude3Haiku => 1.0,
Self::Custom {
default_temperature,
@@ -445,37 +307,41 @@ impl Model {
}
pub fn mode(&self) -> AnthropicModelMode {
- match self {
- Self::ClaudeOpus4
- | Self::ClaudeOpus4_1
- | Self::ClaudeOpus4_5
- | Self::ClaudeOpus4_6
- | Self::ClaudeSonnet4
- | Self::ClaudeSonnet4_5
- | Self::ClaudeSonnet4_5_1mContext
- | Self::ClaudeSonnet4_6
- | Self::ClaudeHaiku4_5
- | Self::Claude3Haiku => AnthropicModelMode::Default,
- Self::ClaudeOpus4Thinking
- | Self::ClaudeOpus4_1Thinking
- | Self::ClaudeOpus4_5Thinking
- | Self::ClaudeOpus4_6Thinking
- | Self::ClaudeSonnet4Thinking
- | Self::ClaudeSonnet4_5Thinking
- | Self::ClaudeSonnet4_5_1mContextThinking
- | Self::ClaudeSonnet4_6Thinking
- | Self::ClaudeHaiku4_5Thinking => AnthropicModelMode::Thinking {
+ if self.supports_adaptive_thinking() {
+ AnthropicModelMode::AdaptiveThinking
+ } else if self.supports_thinking() {
+ AnthropicModelMode::Thinking {
budget_tokens: Some(4_096),
- },
- Self::Custom { mode, .. } => mode.clone(),
+ }
+ } else {
+ AnthropicModelMode::Default
}
}
+ pub fn supports_thinking(&self) -> bool {
+ matches!(
+ self,
+ Self::ClaudeOpus4
+ | Self::ClaudeOpus4_1
+ | Self::ClaudeOpus4_5
+ | Self::ClaudeOpus4_6
+ | Self::ClaudeSonnet4
+ | Self::ClaudeSonnet4_5
+ | Self::ClaudeSonnet4_5_1mContext
+ | Self::ClaudeSonnet4_6
+ | Self::ClaudeHaiku4_5
+ )
+ }
+
+ pub fn supports_adaptive_thinking(&self) -> bool {
+ matches!(self, Self::ClaudeOpus4_6 | Self::ClaudeSonnet4_6)
+ }
+
pub fn beta_headers(&self) -> Option<String> {
let mut headers = vec![];
match self {
- Self::ClaudeSonnet4_5_1mContext | Self::ClaudeSonnet4_5_1mContextThinking => {
+ Self::ClaudeSonnet4_5_1mContext => {
headers.push(CONTEXT_1M_BETA_HEADER.to_string());
}
Self::Custom {
@@ -8,6 +8,7 @@ pub const CHANNEL_COUNT: ChannelCount = nz!(2);
mod audio_settings;
pub use audio_settings::AudioSettings;
+pub use audio_settings::LIVE_SETTINGS;
mod audio_pipeline;
pub use audio_pipeline::{Audio, VoipParts};
@@ -71,8 +71,8 @@ impl Settings for AudioSettings {
}
/// See docs on [LIVE_SETTINGS]
-pub(crate) struct LiveSettings {
- pub(crate) auto_microphone_volume: AtomicBool,
+pub struct LiveSettings {
+ pub auto_microphone_volume: AtomicBool,
pub(crate) auto_speaker_volume: AtomicBool,
pub(crate) denoise: AtomicBool,
}
@@ -128,7 +128,7 @@ impl LiveSettings {
/// observer of SettingsStore. Needed because audio playback and recording are
/// real time and must each run in a dedicated OS thread, therefore we can not
/// use the background executor.
-pub(crate) static LIVE_SETTINGS: LiveSettings = LiveSettings {
+pub static LIVE_SETTINGS: LiveSettings = LiveSettings {
auto_microphone_volume: AtomicBool::new(true),
auto_speaker_volume: AtomicBool::new(true),
denoise: AtomicBool::new(true),
@@ -1,6 +1,6 @@
use anyhow::{Context as _, Result};
use client::Client;
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
use futures_lite::StreamExt;
use gpui::{
App, AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, Global, Task, Window,
@@ -30,9 +30,64 @@ use util::command::new_command;
use workspace::Workspace;
const SHOULD_SHOW_UPDATE_NOTIFICATION_KEY: &str = "auto-updater-should-show-updated-notification";
+
+#[derive(Debug)]
+struct MissingDependencyError(String);
+
+impl std::fmt::Display for MissingDependencyError {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "{}", self.0)
+ }
+}
+
+impl std::error::Error for MissingDependencyError {}
const POLL_INTERVAL: Duration = Duration::from_secs(60 * 60);
const REMOTE_SERVER_CACHE_LIMIT: usize = 5;
+#[cfg(target_os = "linux")]
+fn linux_rsync_install_hint() -> &'static str {
+ let os_release = match std::fs::read_to_string("/etc/os-release") {
+ Ok(os_release) => os_release,
+ Err(_) => return "Please install rsync using your package manager",
+ };
+
+ let mut distribution_ids = Vec::new();
+ for line in os_release.lines() {
+ let trimmed = line.trim();
+ if let Some(value) = trimmed.strip_prefix("ID=") {
+ distribution_ids.push(value.trim_matches('"').to_ascii_lowercase());
+ } else if let Some(value) = trimmed.strip_prefix("ID_LIKE=") {
+ for id in value.trim_matches('"').split_whitespace() {
+ distribution_ids.push(id.to_ascii_lowercase());
+ }
+ }
+ }
+
+ let package_manager_hint = if distribution_ids
+ .iter()
+ .any(|distribution_id| distribution_id == "arch")
+ {
+ Some("Install it with: sudo pacman -S rsync")
+ } else if distribution_ids
+ .iter()
+ .any(|distribution_id| distribution_id == "debian" || distribution_id == "ubuntu")
+ {
+ Some("Install it with: sudo apt install rsync")
+ } else if distribution_ids.iter().any(|distribution_id| {
+ distribution_id == "fedora"
+ || distribution_id == "rhel"
+ || distribution_id == "centos"
+ || distribution_id == "rocky"
+ || distribution_id == "almalinux"
+ }) {
+ Some("Install it with: sudo dnf install rsync")
+ } else {
+ None
+ };
+
+ package_manager_hint.unwrap_or("Please install rsync using your package manager")
+}
+
actions!(
auto_update,
[
@@ -397,7 +452,15 @@ impl AutoUpdater {
this.update(cx, |this, cx| {
this.pending_poll = None;
if let Err(error) = result {
+ let is_missing_dependency =
+ error.downcast_ref::<MissingDependencyError>().is_some();
this.status = match check_type {
+ UpdateCheckType::Automatic if is_missing_dependency => {
+ log::warn!("auto-update: {}", error);
+ AutoUpdateStatus::Errored {
+ error: Arc::new(error),
+ }
+ }
// Be quiet if the check was automated (e.g. when offline)
UpdateCheckType::Automatic => {
log::info!("auto-update check failed: error:{:?}", error);
@@ -629,9 +692,13 @@ impl AutoUpdater {
cx.notify();
});
- let installer_dir = InstallerDir::new().await?;
+ let installer_dir = InstallerDir::new()
+ .await
+ .context("Failed to create installer dir")?;
let target_path = Self::target_path(&installer_dir).await?;
- download_release(&target_path, fetched_release_data, client).await?;
+ download_release(&target_path, fetched_release_data, client)
+ .await
+ .with_context(|| format!("Failed to download update to {}", target_path.display()))?;
this.update(cx, |this, cx| {
this.status = AutoUpdateStatus::Installing {
@@ -640,7 +707,9 @@ impl AutoUpdater {
cx.notify();
});
- let new_binary_path = Self::install_release(installer_dir, target_path, cx).await?;
+ let new_binary_path = Self::install_release(installer_dir, &target_path, cx)
+ .await
+ .with_context(|| format!("Failed to install update at: {}", target_path.display()))?;
if let Some(new_binary_path) = new_binary_path {
cx.update(|cx| cx.set_restart_path(new_binary_path));
}
@@ -709,11 +778,21 @@ impl AutoUpdater {
}
fn check_dependencies() -> Result<()> {
- #[cfg(not(target_os = "windows"))]
+ #[cfg(target_os = "linux")]
+ if which::which("rsync").is_err() {
+ let install_hint = linux_rsync_install_hint();
+ return Err(MissingDependencyError(format!(
+ "rsync is required for auto-updates but is not installed. {install_hint}"
+ ))
+ .into());
+ }
+
+ #[cfg(target_os = "macos")]
anyhow::ensure!(
which::which("rsync").is_ok(),
"Could not auto-update because the required rsync utility was not found."
);
+
Ok(())
}
@@ -730,7 +809,7 @@ impl AutoUpdater {
async fn install_release(
installer_dir: InstallerDir,
- target_path: PathBuf,
+ target_path: &Path,
cx: &AsyncApp,
) -> Result<Option<PathBuf>> {
#[cfg(test)]
@@ -764,17 +843,16 @@ impl AutoUpdater {
should_show: bool,
cx: &App,
) -> Task<Result<()>> {
+ let kvp = KeyValueStore::global(cx);
cx.background_spawn(async move {
if should_show {
- KEY_VALUE_STORE
- .write_kvp(
- SHOULD_SHOW_UPDATE_NOTIFICATION_KEY.to_string(),
- "".to_string(),
- )
- .await?;
+ kvp.write_kvp(
+ SHOULD_SHOW_UPDATE_NOTIFICATION_KEY.to_string(),
+ "".to_string(),
+ )
+ .await?;
} else {
- KEY_VALUE_STORE
- .delete_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY.to_string())
+ kvp.delete_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY.to_string())
.await?;
}
Ok(())
@@ -782,10 +860,9 @@ impl AutoUpdater {
}
pub fn should_show_update_notification(&self, cx: &App) -> Task<Result<bool>> {
+ let kvp = KeyValueStore::global(cx);
cx.background_spawn(async move {
- Ok(KEY_VALUE_STORE
- .read_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY)?
- .is_some())
+ Ok(kvp.read_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY)?.is_some())
})
}
}
@@ -888,7 +965,7 @@ async fn download_release(
async fn install_release_linux(
temp_dir: &InstallerDir,
- downloaded_tar_gz: PathBuf,
+ downloaded_tar_gz: &Path,
cx: &AsyncApp,
) -> Result<Option<PathBuf>> {
let channel = cx.update(|cx| ReleaseChannel::global(cx).dev_name());
@@ -900,13 +977,15 @@ async fn install_release_linux(
.await
.context("failed to create directory into which to extract update")?;
- let output = new_command("tar")
- .arg("-xzf")
+ let mut cmd = new_command("tar");
+ cmd.arg("-xzf")
.arg(&downloaded_tar_gz)
.arg("-C")
- .arg(&extracted)
+ .arg(&extracted);
+ let output = cmd
.output()
- .await?;
+ .await
+ .with_context(|| "failed to extract: {cmd}")?;
anyhow::ensure!(
output.status.success(),
@@ -935,12 +1014,12 @@ async fn install_release_linux(
to = PathBuf::from(prefix);
}
- let output = new_command("rsync")
- .args(["-av", "--delete"])
- .arg(&from)
- .arg(&to)
+ let mut cmd = new_command("rsync");
+ cmd.args(["-av", "--delete"]).arg(&from).arg(&to);
+ let output = cmd
.output()
- .await?;
+ .await
+ .with_context(|| "failed to rsync: {cmd}")?;
anyhow::ensure!(
output.status.success(),
@@ -955,7 +1034,7 @@ async fn install_release_linux(
async fn install_release_macos(
temp_dir: &InstallerDir,
- downloaded_dmg: PathBuf,
+ downloaded_dmg: &Path,
cx: &AsyncApp,
) -> Result<Option<PathBuf>> {
let running_app_path = cx.update(|cx| cx.app_path())?;
@@ -967,13 +1046,15 @@ async fn install_release_macos(
let mut mounted_app_path: OsString = mount_path.join(running_app_filename).into();
mounted_app_path.push("/");
- let output = new_command("hdiutil")
- .args(["attach", "-nobrowse"])
+ let mut cmd = new_command("hdiutil");
+ cmd.args(["attach", "-nobrowse"])
.arg(&downloaded_dmg)
.arg("-mountroot")
- .arg(temp_dir.path())
+ .arg(temp_dir.path());
+ let output = cmd
.output()
- .await?;
+ .await
+ .with_context(|| "failed to mount: {cmd}")?;
anyhow::ensure!(
output.status.success(),
@@ -987,12 +1068,14 @@ async fn install_release_macos(
background_executor: cx.background_executor(),
};
- let output = new_command("rsync")
- .args(["-av", "--delete", "--exclude", "Icon?"])
+ let mut cmd = new_command("rsync");
+ cmd.args(["-av", "--delete", "--exclude", "Icon?"])
.arg(&mounted_app_path)
- .arg(&running_app_path)
+ .arg(&running_app_path);
+ let output = cmd
.output()
- .await?;
+ .await
+ .with_context(|| format!("failed to rsync: {cmd:?}"))?;
anyhow::ensure!(
output.status.success(),
@@ -1017,14 +1100,13 @@ async fn cleanup_windows() -> Result<()> {
Ok(())
}
-async fn install_release_windows(downloaded_installer: PathBuf) -> Result<Option<PathBuf>> {
- let output = new_command(downloaded_installer)
- .arg("/verysilent")
+async fn install_release_windows(downloaded_installer: &Path) -> Result<Option<PathBuf>> {
+ let mut cmd = new_command(downloaded_installer);
+ cmd.arg("/verysilent")
.arg("/update=true")
.arg("!desktopicon")
- .arg("!quicklaunchicon")
- .output()
- .await?;
+ .arg("!quicklaunchicon");
+ let output = cmd.output().await?;
anyhow::ensure!(
output.status.success(),
"failed to start installer: {:?}",
@@ -1089,9 +1171,7 @@ mod tests {
use super::*;
- pub(super) struct InstallOverride(
- pub Rc<dyn Fn(PathBuf, &AsyncApp) -> Result<Option<PathBuf>>>,
- );
+ pub(super) struct InstallOverride(pub Rc<dyn Fn(&Path, &AsyncApp) -> Result<Option<PathBuf>>>);
impl Global for InstallOverride {}
#[gpui::test]
@@ -48,49 +48,49 @@ pub enum Model {
// Anthropic Claude 4+ models
#[serde(rename = "claude-haiku-4-5", alias = "claude-haiku-4-5-latest")]
ClaudeHaiku4_5,
- #[serde(rename = "claude-sonnet-4", alias = "claude-sonnet-4-latest")]
- ClaudeSonnet4,
#[serde(
- rename = "claude-sonnet-4-thinking",
+ rename = "claude-sonnet-4",
+ alias = "claude-sonnet-4-latest",
+ alias = "claude-sonnet-4-thinking",
alias = "claude-sonnet-4-thinking-latest"
)]
- ClaudeSonnet4Thinking,
+ ClaudeSonnet4,
#[default]
- #[serde(rename = "claude-sonnet-4-5", alias = "claude-sonnet-4-5-latest")]
- ClaudeSonnet4_5,
#[serde(
- rename = "claude-sonnet-4-5-thinking",
+ rename = "claude-sonnet-4-5",
+ alias = "claude-sonnet-4-5-latest",
+ alias = "claude-sonnet-4-5-thinking",
alias = "claude-sonnet-4-5-thinking-latest"
)]
- ClaudeSonnet4_5Thinking,
- #[serde(rename = "claude-opus-4-1", alias = "claude-opus-4-1-latest")]
- ClaudeOpus4_1,
+ ClaudeSonnet4_5,
#[serde(
- rename = "claude-opus-4-1-thinking",
+ rename = "claude-opus-4-1",
+ alias = "claude-opus-4-1-latest",
+ alias = "claude-opus-4-1-thinking",
alias = "claude-opus-4-1-thinking-latest"
)]
- ClaudeOpus4_1Thinking,
- #[serde(rename = "claude-opus-4-5", alias = "claude-opus-4-5-latest")]
- ClaudeOpus4_5,
+ ClaudeOpus4_1,
#[serde(
- rename = "claude-opus-4-5-thinking",
+ rename = "claude-opus-4-5",
+ alias = "claude-opus-4-5-latest",
+ alias = "claude-opus-4-5-thinking",
alias = "claude-opus-4-5-thinking-latest"
)]
- ClaudeOpus4_5Thinking,
- #[serde(rename = "claude-opus-4-6", alias = "claude-opus-4-6-latest")]
- ClaudeOpus4_6,
+ ClaudeOpus4_5,
#[serde(
- rename = "claude-opus-4-6-thinking",
+ rename = "claude-opus-4-6",
+ alias = "claude-opus-4-6-latest",
+ alias = "claude-opus-4-6-thinking",
alias = "claude-opus-4-6-thinking-latest"
)]
- ClaudeOpus4_6Thinking,
- #[serde(rename = "claude-sonnet-4-6", alias = "claude-sonnet-4-6-latest")]
- ClaudeSonnet4_6,
+ ClaudeOpus4_6,
#[serde(
- rename = "claude-sonnet-4-6-thinking",
+ rename = "claude-sonnet-4-6",
+ alias = "claude-sonnet-4-6-latest",
+ alias = "claude-sonnet-4-6-thinking",
alias = "claude-sonnet-4-6-thinking-latest"
)]
- ClaudeSonnet4_6Thinking,
+ ClaudeSonnet4_6,
// Meta Llama 4 models
#[serde(rename = "llama-4-scout-17b")]
@@ -181,28 +181,16 @@ impl Model {
}
pub fn from_id(id: &str) -> anyhow::Result<Self> {
- if id.starts_with("claude-opus-4-6-thinking") {
- Ok(Self::ClaudeOpus4_6Thinking)
- } else if id.starts_with("claude-opus-4-6") {
+ if id.starts_with("claude-opus-4-6") {
Ok(Self::ClaudeOpus4_6)
- } else if id.starts_with("claude-opus-4-5-thinking") {
- Ok(Self::ClaudeOpus4_5Thinking)
} else if id.starts_with("claude-opus-4-5") {
Ok(Self::ClaudeOpus4_5)
- } else if id.starts_with("claude-opus-4-1-thinking") {
- Ok(Self::ClaudeOpus4_1Thinking)
} else if id.starts_with("claude-opus-4-1") {
Ok(Self::ClaudeOpus4_1)
- } else if id.starts_with("claude-sonnet-4-6-thinking") {
- Ok(Self::ClaudeSonnet4_6Thinking)
} else if id.starts_with("claude-sonnet-4-6") {
Ok(Self::ClaudeSonnet4_6)
- } else if id.starts_with("claude-sonnet-4-5-thinking") {
- Ok(Self::ClaudeSonnet4_5Thinking)
} else if id.starts_with("claude-sonnet-4-5") {
Ok(Self::ClaudeSonnet4_5)
- } else if id.starts_with("claude-sonnet-4-thinking") {
- Ok(Self::ClaudeSonnet4Thinking)
} else if id.starts_with("claude-sonnet-4") {
Ok(Self::ClaudeSonnet4)
} else if id.starts_with("claude-haiku-4-5") {
@@ -216,17 +204,11 @@ impl Model {
match self {
Self::ClaudeHaiku4_5 => "claude-haiku-4-5",
Self::ClaudeSonnet4 => "claude-sonnet-4",
- Self::ClaudeSonnet4Thinking => "claude-sonnet-4-thinking",
Self::ClaudeSonnet4_5 => "claude-sonnet-4-5",
- Self::ClaudeSonnet4_5Thinking => "claude-sonnet-4-5-thinking",
Self::ClaudeOpus4_1 => "claude-opus-4-1",
- Self::ClaudeOpus4_1Thinking => "claude-opus-4-1-thinking",
Self::ClaudeOpus4_5 => "claude-opus-4-5",
- Self::ClaudeOpus4_5Thinking => "claude-opus-4-5-thinking",
Self::ClaudeOpus4_6 => "claude-opus-4-6",
- Self::ClaudeOpus4_6Thinking => "claude-opus-4-6-thinking",
Self::ClaudeSonnet4_6 => "claude-sonnet-4-6",
- Self::ClaudeSonnet4_6Thinking => "claude-sonnet-4-6-thinking",
Self::Llama4Scout17B => "llama-4-scout-17b",
Self::Llama4Maverick17B => "llama-4-maverick-17b",
Self::Gemma3_4B => "gemma-3-4b",
@@ -261,20 +243,12 @@ impl Model {
pub fn request_id(&self) -> &str {
match self {
Self::ClaudeHaiku4_5 => "anthropic.claude-haiku-4-5-20251001-v1:0",
- Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking => {
- "anthropic.claude-sonnet-4-20250514-v1:0"
- }
- Self::ClaudeSonnet4_5 | Self::ClaudeSonnet4_5Thinking => {
- "anthropic.claude-sonnet-4-5-20250929-v1:0"
- }
- Self::ClaudeOpus4_1 | Self::ClaudeOpus4_1Thinking => {
- "anthropic.claude-opus-4-1-20250805-v1:0"
- }
- Self::ClaudeOpus4_5 | Self::ClaudeOpus4_5Thinking => {
- "anthropic.claude-opus-4-5-20251101-v1:0"
- }
- Self::ClaudeOpus4_6 | Self::ClaudeOpus4_6Thinking => "anthropic.claude-opus-4-6-v1",
- Self::ClaudeSonnet4_6 | Self::ClaudeSonnet4_6Thinking => "anthropic.claude-sonnet-4-6",
+ Self::ClaudeSonnet4 => "anthropic.claude-sonnet-4-20250514-v1:0",
+ Self::ClaudeSonnet4_5 => "anthropic.claude-sonnet-4-5-20250929-v1:0",
+ Self::ClaudeOpus4_1 => "anthropic.claude-opus-4-1-20250805-v1:0",
+ Self::ClaudeOpus4_5 => "anthropic.claude-opus-4-5-20251101-v1:0",
+ Self::ClaudeOpus4_6 => "anthropic.claude-opus-4-6-v1",
+ Self::ClaudeSonnet4_6 => "anthropic.claude-sonnet-4-6",
Self::Llama4Scout17B => "meta.llama4-scout-17b-instruct-v1:0",
Self::Llama4Maverick17B => "meta.llama4-maverick-17b-instruct-v1:0",
Self::Gemma3_4B => "google.gemma-3-4b-it",
@@ -310,17 +284,11 @@ impl Model {
match self {
Self::ClaudeHaiku4_5 => "Claude Haiku 4.5",
Self::ClaudeSonnet4 => "Claude Sonnet 4",
- Self::ClaudeSonnet4Thinking => "Claude Sonnet 4 Thinking",
Self::ClaudeSonnet4_5 => "Claude Sonnet 4.5",
- Self::ClaudeSonnet4_5Thinking => "Claude Sonnet 4.5 Thinking",
Self::ClaudeOpus4_1 => "Claude Opus 4.1",
- Self::ClaudeOpus4_1Thinking => "Claude Opus 4.1 Thinking",
Self::ClaudeOpus4_5 => "Claude Opus 4.5",
- Self::ClaudeOpus4_5Thinking => "Claude Opus 4.5 Thinking",
Self::ClaudeOpus4_6 => "Claude Opus 4.6",
- Self::ClaudeOpus4_6Thinking => "Claude Opus 4.6 Thinking",
Self::ClaudeSonnet4_6 => "Claude Sonnet 4.6",
- Self::ClaudeSonnet4_6Thinking => "Claude Sonnet 4.6 Thinking",
Self::Llama4Scout17B => "Llama 4 Scout 17B",
Self::Llama4Maverick17B => "Llama 4 Maverick 17B",
Self::Gemma3_4B => "Gemma 3 4B",
@@ -362,17 +330,11 @@ impl Model {
match self {
Self::ClaudeHaiku4_5
| Self::ClaudeSonnet4
- | Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
- | Self::ClaudeSonnet4_5Thinking
| Self::ClaudeOpus4_1
- | Self::ClaudeOpus4_1Thinking
| Self::ClaudeOpus4_5
- | Self::ClaudeOpus4_5Thinking
| Self::ClaudeOpus4_6
- | Self::ClaudeOpus4_6Thinking
- | Self::ClaudeSonnet4_6
- | Self::ClaudeSonnet4_6Thinking => 200_000,
+ | Self::ClaudeSonnet4_6 => 200_000,
Self::Llama4Scout17B | Self::Llama4Maverick17B => 128_000,
Self::Gemma3_4B | Self::Gemma3_12B | Self::Gemma3_27B => 128_000,
Self::MagistralSmall | Self::MistralLarge3 | Self::PixtralLarge => 128_000,
@@ -397,15 +359,12 @@ impl Model {
pub fn max_output_tokens(&self) -> u64 {
match self {
Self::ClaudeHaiku4_5
+ | Self::ClaudeSonnet4
| Self::ClaudeSonnet4_5
- | Self::ClaudeSonnet4_5Thinking
| Self::ClaudeOpus4_5
- | Self::ClaudeOpus4_5Thinking
- | Self::ClaudeSonnet4_6
- | Self::ClaudeSonnet4_6Thinking => 64_000,
- Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking => 64_000,
- Self::ClaudeOpus4_1 | Self::ClaudeOpus4_1Thinking => 32_000,
- Self::ClaudeOpus4_6 | Self::ClaudeOpus4_6Thinking => 128_000,
+ | Self::ClaudeSonnet4_6 => 64_000,
+ Self::ClaudeOpus4_1 => 32_000,
+ Self::ClaudeOpus4_6 => 128_000,
Self::Llama4Scout17B
| Self::Llama4Maverick17B
| Self::Gemma3_4B
@@ -436,17 +395,11 @@ impl Model {
match self {
Self::ClaudeHaiku4_5
| Self::ClaudeSonnet4
- | Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
- | Self::ClaudeSonnet4_5Thinking
| Self::ClaudeOpus4_1
- | Self::ClaudeOpus4_1Thinking
| Self::ClaudeOpus4_5
- | Self::ClaudeOpus4_5Thinking
| Self::ClaudeOpus4_6
- | Self::ClaudeOpus4_6Thinking
- | Self::ClaudeSonnet4_6
- | Self::ClaudeSonnet4_6Thinking => 1.0,
+ | Self::ClaudeSonnet4_6 => 1.0,
Self::Custom {
default_temperature,
..
@@ -459,17 +412,11 @@ impl Model {
match self {
Self::ClaudeHaiku4_5
| Self::ClaudeSonnet4
- | Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
- | Self::ClaudeSonnet4_5Thinking
| Self::ClaudeOpus4_1
- | Self::ClaudeOpus4_1Thinking
| Self::ClaudeOpus4_5
- | Self::ClaudeOpus4_5Thinking
| Self::ClaudeOpus4_6
- | Self::ClaudeOpus4_6Thinking
- | Self::ClaudeSonnet4_6
- | Self::ClaudeSonnet4_6Thinking => true,
+ | Self::ClaudeSonnet4_6 => true,
Self::NovaLite | Self::NovaPro | Self::NovaPremier | Self::Nova2Lite => true,
Self::MistralLarge3 | Self::PixtralLarge | Self::MagistralSmall => true,
// Gemma accepts toolConfig without error but produces unreliable tool
@@ -492,17 +439,11 @@ impl Model {
match self {
Self::ClaudeHaiku4_5
| Self::ClaudeSonnet4
- | Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
- | Self::ClaudeSonnet4_5Thinking
| Self::ClaudeOpus4_1
- | Self::ClaudeOpus4_1Thinking
| Self::ClaudeOpus4_5
- | Self::ClaudeOpus4_5Thinking
| Self::ClaudeOpus4_6
- | Self::ClaudeOpus4_6Thinking
- | Self::ClaudeSonnet4_6
- | Self::ClaudeSonnet4_6Thinking => true,
+ | Self::ClaudeSonnet4_6 => true,
Self::NovaLite | Self::NovaPro => true,
Self::PixtralLarge => true,
Self::Qwen3VL235B => true,
@@ -515,15 +456,10 @@ impl Model {
matches!(
self,
Self::ClaudeSonnet4
- | Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
- | Self::ClaudeSonnet4_5Thinking
| Self::ClaudeOpus4_5
- | Self::ClaudeOpus4_5Thinking
| Self::ClaudeOpus4_6
- | Self::ClaudeOpus4_6Thinking
| Self::ClaudeSonnet4_6
- | Self::ClaudeSonnet4_6Thinking
)
}
@@ -531,17 +467,11 @@ impl Model {
match self {
Self::ClaudeHaiku4_5
| Self::ClaudeSonnet4
- | Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
- | Self::ClaudeSonnet4_5Thinking
| Self::ClaudeOpus4_1
- | Self::ClaudeOpus4_1Thinking
| Self::ClaudeOpus4_5
- | Self::ClaudeOpus4_5Thinking
| Self::ClaudeOpus4_6
- | Self::ClaudeOpus4_6Thinking
- | Self::ClaudeSonnet4_6
- | Self::ClaudeSonnet4_6Thinking => true,
+ | Self::ClaudeSonnet4_6 => true,
Self::Custom {
cache_configuration,
..
@@ -553,17 +483,11 @@ impl Model {
pub fn cache_configuration(&self) -> Option<BedrockModelCacheConfiguration> {
match self {
Self::ClaudeSonnet4
- | Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
- | Self::ClaudeSonnet4_5Thinking
| Self::ClaudeOpus4_1
- | Self::ClaudeOpus4_1Thinking
| Self::ClaudeOpus4_5
- | Self::ClaudeOpus4_5Thinking
| Self::ClaudeOpus4_6
- | Self::ClaudeOpus4_6Thinking
- | Self::ClaudeSonnet4_6
- | Self::ClaudeSonnet4_6Thinking => Some(BedrockModelCacheConfiguration {
+ | Self::ClaudeSonnet4_6 => Some(BedrockModelCacheConfiguration {
max_cache_anchors: 4,
min_total_token: 1024,
}),
@@ -579,25 +503,34 @@ impl Model {
}
}
- pub fn mode(&self) -> BedrockModelMode {
- match self {
- Self::ClaudeSonnet4Thinking | Self::ClaudeSonnet4_5Thinking => {
- BedrockModelMode::Thinking {
- budget_tokens: Some(4096),
- }
+ pub fn supports_thinking(&self) -> bool {
+ matches!(
+ self,
+ Self::ClaudeHaiku4_5
+ | Self::ClaudeSonnet4
+ | Self::ClaudeSonnet4_5
+ | Self::ClaudeOpus4_1
+ | Self::ClaudeOpus4_5
+ | Self::ClaudeOpus4_6
+ | Self::ClaudeSonnet4_6
+ )
+ }
+
+ pub fn supports_adaptive_thinking(&self) -> bool {
+ matches!(self, Self::ClaudeOpus4_6 | Self::ClaudeSonnet4_6)
+ }
+
+ pub fn thinking_mode(&self) -> BedrockModelMode {
+ if self.supports_adaptive_thinking() {
+ BedrockModelMode::AdaptiveThinking {
+ effort: BedrockAdaptiveThinkingEffort::default(),
}
- Self::ClaudeOpus4_1Thinking | Self::ClaudeOpus4_5Thinking => {
- BedrockModelMode::Thinking {
- budget_tokens: Some(4096),
- }
+ } else if self.supports_thinking() {
+ BedrockModelMode::Thinking {
+ budget_tokens: Some(4096),
}
- Self::ClaudeOpus4_6Thinking => BedrockModelMode::AdaptiveThinking {
- effort: BedrockAdaptiveThinkingEffort::default(),
- },
- Self::ClaudeSonnet4_6Thinking => BedrockModelMode::AdaptiveThinking {
- effort: BedrockAdaptiveThinkingEffort::default(),
- },
- _ => BedrockModelMode::Default,
+ } else {
+ BedrockModelMode::Default
}
}
@@ -612,15 +545,10 @@ impl Model {
self,
Self::ClaudeHaiku4_5
| Self::ClaudeSonnet4
- | Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
- | Self::ClaudeSonnet4_5Thinking
| Self::ClaudeOpus4_5
- | Self::ClaudeOpus4_5Thinking
| Self::ClaudeOpus4_6
- | Self::ClaudeOpus4_6Thinking
| Self::ClaudeSonnet4_6
- | Self::ClaudeSonnet4_6Thinking
| Self::Nova2Lite
);
@@ -676,39 +604,26 @@ impl Model {
(
Self::ClaudeHaiku4_5
| Self::ClaudeSonnet4
- | Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
- | Self::ClaudeSonnet4_5Thinking
| Self::ClaudeOpus4_5
- | Self::ClaudeOpus4_5Thinking
| Self::ClaudeOpus4_6
- | Self::ClaudeOpus4_6Thinking
| Self::ClaudeSonnet4_6
- | Self::ClaudeSonnet4_6Thinking
| Self::Nova2Lite,
"global",
) => Ok(format!("{}.{}", region_group, model_id)),
// US Government region inference profiles
- (Self::ClaudeSonnet4_5 | Self::ClaudeSonnet4_5Thinking, "us-gov") => {
- Ok(format!("{}.{}", region_group, model_id))
- }
+ (Self::ClaudeSonnet4_5, "us-gov") => Ok(format!("{}.{}", region_group, model_id)),
// US region inference profiles
(
Self::ClaudeHaiku4_5
| Self::ClaudeSonnet4
- | Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
- | Self::ClaudeSonnet4_5Thinking
| Self::ClaudeOpus4_1
- | Self::ClaudeOpus4_1Thinking
| Self::ClaudeOpus4_5
- | Self::ClaudeOpus4_5Thinking
| Self::ClaudeOpus4_6
- | Self::ClaudeOpus4_6Thinking
| Self::ClaudeSonnet4_6
- | Self::ClaudeSonnet4_6Thinking
| Self::Llama4Scout17B
| Self::Llama4Maverick17B
| Self::NovaLite
@@ -728,11 +643,8 @@ impl Model {
Self::ClaudeHaiku4_5
| Self::ClaudeSonnet4
| Self::ClaudeSonnet4_5
- | Self::ClaudeSonnet4_5Thinking
| Self::ClaudeOpus4_6
- | Self::ClaudeOpus4_6Thinking
| Self::ClaudeSonnet4_6
- | Self::ClaudeSonnet4_6Thinking
| Self::NovaLite
| Self::NovaPro
| Self::Nova2Lite,
@@ -743,11 +655,8 @@ impl Model {
(
Self::ClaudeHaiku4_5
| Self::ClaudeSonnet4_5
- | Self::ClaudeSonnet4_5Thinking
| Self::ClaudeOpus4_6
- | Self::ClaudeOpus4_6Thinking
- | Self::ClaudeSonnet4_6
- | Self::ClaudeSonnet4_6Thinking,
+ | Self::ClaudeSonnet4_6,
"au",
) => Ok(format!("{}.{}", region_group, model_id)),
@@ -755,9 +664,7 @@ impl Model {
(
Self::ClaudeHaiku4_5
| Self::ClaudeSonnet4_5
- | Self::ClaudeSonnet4_5Thinking
| Self::ClaudeSonnet4_6
- | Self::ClaudeSonnet4_6Thinking
| Self::Nova2Lite,
"jp",
) => Ok(format!("{}.{}", region_group, model_id)),
@@ -767,7 +674,6 @@ impl Model {
Self::ClaudeHaiku4_5
| Self::ClaudeSonnet4
| Self::ClaudeSonnet4_5
- | Self::ClaudeSonnet4_5Thinking
| Self::NovaLite
| Self::NovaPro
| Self::Nova2Lite,
@@ -889,7 +795,7 @@ mod tests {
"us-gov.anthropic.claude-sonnet-4-5-20250929-v1:0"
);
assert_eq!(
- Model::ClaudeSonnet4_5Thinking.cross_region_inference_id("us-gov-west-1", false)?,
+ Model::ClaudeSonnet4_5.cross_region_inference_id("us-gov-west-1", false)?,
"us-gov.anthropic.claude-sonnet-4-5-20250929-v1:0"
);
Ok(())
@@ -996,33 +902,43 @@ mod tests {
"meta.llama4-scout-17b-instruct-v1:0"
);
- // Thinking models have different friendly IDs but same request IDs
+ // Thinking aliases deserialize to the same model
assert_eq!(Model::ClaudeSonnet4.id(), "claude-sonnet-4");
assert_eq!(
- Model::ClaudeSonnet4Thinking.id(),
- "claude-sonnet-4-thinking"
- );
- assert_eq!(
- Model::ClaudeSonnet4.request_id(),
- Model::ClaudeSonnet4Thinking.request_id()
+ Model::from_id("claude-sonnet-4-thinking").unwrap().id(),
+ "claude-sonnet-4"
);
}
#[test]
- fn test_model_modes() {
- assert_eq!(Model::ClaudeSonnet4.mode(), BedrockModelMode::Default);
+ fn test_thinking_modes() {
+ assert!(Model::ClaudeHaiku4_5.supports_thinking());
+ assert!(Model::ClaudeSonnet4.supports_thinking());
+ assert!(Model::ClaudeSonnet4_5.supports_thinking());
+ assert!(Model::ClaudeOpus4_6.supports_thinking());
+
+ assert!(!Model::ClaudeSonnet4.supports_adaptive_thinking());
+ assert!(Model::ClaudeOpus4_6.supports_adaptive_thinking());
+ assert!(Model::ClaudeSonnet4_6.supports_adaptive_thinking());
+
assert_eq!(
- Model::ClaudeSonnet4Thinking.mode(),
+ Model::ClaudeSonnet4.thinking_mode(),
BedrockModelMode::Thinking {
budget_tokens: Some(4096)
}
);
assert_eq!(
- Model::ClaudeOpus4_6Thinking.mode(),
+ Model::ClaudeOpus4_6.thinking_mode(),
BedrockModelMode::AdaptiveThinking {
effort: BedrockAdaptiveThinkingEffort::High
}
);
+ assert_eq!(
+ Model::ClaudeHaiku4_5.thinking_mode(),
+ BedrockModelMode::Thinking {
+ budget_tokens: Some(4096)
+ }
+ );
}
#[test]
@@ -19,7 +19,8 @@ test-support = [
"gpui/test-support",
"livekit_client/test-support",
"project/test-support",
- "util/test-support"
+ "util/test-support",
+ "workspace/test-support"
]
[dependencies]
@@ -51,5 +52,6 @@ gpui = { workspace = true, features = ["test-support"] }
language = { workspace = true, features = ["test-support"] }
project = { workspace = true, features = ["test-support"] }
util = { workspace = true, features = ["test-support"] }
+workspace = { workspace = true, features = ["test-support"] }
livekit_client = { workspace = true, features = ["test-support"] }
@@ -0,0 +1,232 @@
+use gpui::{Context, Task, WeakEntity};
+use livekit_client::ConnectionQuality;
+use std::time::Duration;
+
+use super::room::Room;
+
+#[derive(Clone, Default)]
+pub struct CallStats {
+ pub connection_quality: Option<ConnectionQuality>,
+ pub effective_quality: Option<ConnectionQuality>,
+ pub latency_ms: Option<f64>,
+ pub jitter_ms: Option<f64>,
+ pub packet_loss_pct: Option<f64>,
+ pub input_lag: Option<Duration>,
+}
+
+pub struct CallDiagnostics {
+ stats: CallStats,
+ room: WeakEntity<Room>,
+ poll_task: Option<Task<()>>,
+ stats_update_task: Option<Task<()>>,
+}
+
+impl CallDiagnostics {
+ pub fn new(room: WeakEntity<Room>, cx: &mut Context<Self>) -> Self {
+ let mut this = Self {
+ stats: CallStats::default(),
+ room,
+ poll_task: None,
+ stats_update_task: None,
+ };
+ this.start_polling(cx);
+ this
+ }
+
+ pub fn stats(&self) -> &CallStats {
+ &self.stats
+ }
+
+ fn start_polling(&mut self, cx: &mut Context<Self>) {
+ self.poll_task = Some(cx.spawn(async move |this, cx| {
+ loop {
+ if this.update(cx, |this, cx| this.poll_stats(cx)).is_err() {
+ break;
+ }
+ cx.background_executor().timer(Duration::from_secs(1)).await;
+ }
+ }));
+ }
+
+ fn poll_stats(&mut self, cx: &mut Context<Self>) {
+ let Some(room) = self.room.upgrade() else {
+ return;
+ };
+
+ let connection_quality = room.read(cx).connection_quality();
+ self.stats.connection_quality = Some(connection_quality);
+ self.stats.input_lag = room.read(cx).input_lag();
+
+ let stats_future = room.read(cx).get_stats(cx);
+
+ let background_task = cx.background_executor().spawn(async move {
+ let session_stats = stats_future.await;
+ session_stats.map(|stats| compute_network_stats(&stats))
+ });
+
+ self.stats_update_task = Some(cx.spawn(async move |this, cx| {
+ let result = background_task.await;
+ this.update(cx, |this, cx| {
+ if let Some(computed) = result {
+ this.stats.latency_ms = computed.latency_ms;
+ this.stats.jitter_ms = computed.jitter_ms;
+ this.stats.packet_loss_pct = computed.packet_loss_pct;
+ }
+ let quality = this
+ .stats
+ .connection_quality
+ .unwrap_or(ConnectionQuality::Lost);
+ this.stats.effective_quality =
+ Some(effective_connection_quality(quality, &this.stats));
+ cx.notify();
+ })
+ .ok();
+ }));
+ }
+}
+
+struct ComputedNetworkStats {
+ latency_ms: Option<f64>,
+ jitter_ms: Option<f64>,
+ packet_loss_pct: Option<f64>,
+}
+
+fn compute_network_stats(stats: &livekit_client::SessionStats) -> ComputedNetworkStats {
+ let mut min_rtt: Option<f64> = None;
+ let mut max_jitter: Option<f64> = None;
+ let mut total_packets_received: u64 = 0;
+ let mut total_packets_lost: i64 = 0;
+
+ let all_stats = stats
+ .publisher_stats
+ .iter()
+ .chain(stats.subscriber_stats.iter());
+
+ for stat in all_stats {
+ extract_metrics(
+ stat,
+ &mut min_rtt,
+ &mut max_jitter,
+ &mut total_packets_received,
+ &mut total_packets_lost,
+ );
+ }
+
+ let total_expected = total_packets_received as i64 + total_packets_lost;
+ let packet_loss_pct = if total_expected > 0 {
+ Some((total_packets_lost as f64 / total_expected as f64) * 100.0)
+ } else {
+ None
+ };
+
+ ComputedNetworkStats {
+ latency_ms: min_rtt.map(|rtt| rtt * 1000.0),
+ jitter_ms: max_jitter.map(|j| j * 1000.0),
+ packet_loss_pct,
+ }
+}
+
+#[cfg(all(
+ not(rust_analyzer),
+ any(
+ test,
+ feature = "test-support",
+ all(target_os = "windows", target_env = "gnu"),
+ target_os = "freebsd"
+ )
+))]
+fn extract_metrics(
+ _stat: &livekit_client::RtcStats,
+ _min_rtt: &mut Option<f64>,
+ _max_jitter: &mut Option<f64>,
+ _total_packets_received: &mut u64,
+ _total_packets_lost: &mut i64,
+) {
+}
+
+#[cfg(any(
+ rust_analyzer,
+ not(any(
+ test,
+ feature = "test-support",
+ all(target_os = "windows", target_env = "gnu"),
+ target_os = "freebsd"
+ ))
+))]
+fn extract_metrics(
+ stat: &livekit_client::RtcStats,
+ min_rtt: &mut Option<f64>,
+ max_jitter: &mut Option<f64>,
+ total_packets_received: &mut u64,
+ total_packets_lost: &mut i64,
+) {
+ use livekit_client::RtcStats;
+
+ match stat {
+ RtcStats::CandidatePair(pair) => {
+ let rtt = pair.candidate_pair.current_round_trip_time;
+ if rtt > 0.0 {
+ *min_rtt = Some(match *min_rtt {
+ Some(current) => current.min(rtt),
+ None => rtt,
+ });
+ }
+ }
+ RtcStats::InboundRtp(inbound) => {
+ let jitter = inbound.received.jitter;
+ if jitter > 0.0 {
+ *max_jitter = Some(match *max_jitter {
+ Some(current) => current.max(jitter),
+ None => jitter,
+ });
+ }
+ *total_packets_received += inbound.received.packets_received;
+ *total_packets_lost += inbound.received.packets_lost;
+ }
+ RtcStats::RemoteInboundRtp(remote_inbound) => {
+ let rtt = remote_inbound.remote_inbound.round_trip_time;
+ if rtt > 0.0 {
+ *min_rtt = Some(match *min_rtt {
+ Some(current) => current.min(rtt),
+ None => rtt,
+ });
+ }
+ }
+ _ => {}
+ }
+}
+
+fn metric_quality(value: f64, warn_threshold: f64, error_threshold: f64) -> ConnectionQuality {
+ if value < warn_threshold {
+ ConnectionQuality::Excellent
+ } else if value < error_threshold {
+ ConnectionQuality::Poor
+ } else {
+ ConnectionQuality::Lost
+ }
+}
+
+/// Computes the effective connection quality by taking the worst of the
+/// LiveKit-reported quality and each individual metric rating.
+fn effective_connection_quality(
+ livekit_quality: ConnectionQuality,
+ stats: &CallStats,
+) -> ConnectionQuality {
+ let mut worst = livekit_quality;
+
+ if let Some(latency) = stats.latency_ms {
+ worst = worst.max(metric_quality(latency, 100.0, 300.0));
+ }
+ if let Some(jitter) = stats.jitter_ms {
+ worst = worst.max(metric_quality(jitter, 30.0, 75.0));
+ }
+ if let Some(loss) = stats.packet_loss_pct {
+ worst = worst.max(metric_quality(loss, 1.0, 5.0));
+ }
+ if let Some(lag) = stats.input_lag {
+ let lag_ms = lag.as_secs_f64() * 1000.0;
+ worst = worst.max(metric_quality(lag_ms, 20.0, 50.0));
+ }
+
+ worst
+}
@@ -1,3 +1,4 @@
+pub mod diagnostics;
pub mod participant;
pub mod room;
@@ -23,7 +23,10 @@ use livekit_client::{self as livekit, AudioStream, TrackSid};
use postage::{sink::Sink, stream::Stream, watch};
use project::Project;
use settings::Settings as _;
+use std::sync::atomic::AtomicU64;
use std::{future::Future, mem, rc::Rc, sync::Arc, time::Duration, time::Instant};
+
+use super::diagnostics::CallDiagnostics;
use util::{ResultExt, TryFutureExt, paths::PathStyle, post_inc};
use workspace::ParticipantLocation;
@@ -69,6 +72,7 @@ pub struct Room {
id: u64,
channel_id: Option<ChannelId>,
live_kit: Option<LiveKitRoom>,
+ diagnostics: Option<Entity<CallDiagnostics>>,
status: RoomStatus,
shared_projects: HashSet<WeakEntity<Project>>,
joined_projects: HashSet<WeakEntity<Project>>,
@@ -136,6 +140,7 @@ impl Room {
id,
channel_id,
live_kit: None,
+ diagnostics: None,
status: RoomStatus::Online,
shared_projects: Default::default(),
joined_projects: Default::default(),
@@ -350,6 +355,7 @@ impl Room {
self.participant_user_ids.clear();
self.client_subscriptions.clear();
self.live_kit.take();
+ self.diagnostics.take();
self.pending_room_update.take();
self.maintain_connection.take();
}
@@ -540,6 +546,42 @@ impl Room {
}
}
+ pub fn get_stats(&self, cx: &App) -> Task<Option<livekit::SessionStats>> {
+ match self.live_kit.as_ref() {
+ Some(lk) => {
+ let task = lk.room.stats_task(cx);
+ cx.background_executor()
+ .spawn(async move { task.await.ok() })
+ }
+ None => Task::ready(None),
+ }
+ }
+
+ pub fn input_lag(&self) -> Option<Duration> {
+ let us = self
+ .live_kit
+ .as_ref()?
+ .input_lag_us
+ .as_ref()?
+ .load(std::sync::atomic::Ordering::Relaxed);
+ if us > 0 {
+ Some(Duration::from_micros(us))
+ } else {
+ None
+ }
+ }
+
+ pub fn diagnostics(&self) -> Option<&Entity<CallDiagnostics>> {
+ self.diagnostics.as_ref()
+ }
+
+ pub fn connection_quality(&self) -> livekit::ConnectionQuality {
+ self.live_kit
+ .as_ref()
+ .map(|lk| lk.room.local_participant().connection_quality())
+ .unwrap_or(livekit::ConnectionQuality::Lost)
+ }
+
pub fn status(&self) -> RoomStatus {
self.status
}
@@ -1383,7 +1425,7 @@ impl Room {
};
match publication {
- Ok((publication, stream)) => {
+ Ok((publication, stream, input_lag_us)) => {
if canceled {
cx.spawn(async move |_, cx| {
room.unpublish_local_track(publication.sid(), cx).await
@@ -1393,6 +1435,7 @@ impl Room {
if live_kit.muted_by_user || live_kit.deafened {
publication.mute(cx);
}
+ live_kit.input_lag_us = Some(input_lag_us);
live_kit.microphone_track = LocalTrack::Published {
track_publication: publication,
_stream: Box::new(stream),
@@ -1486,6 +1529,84 @@ impl Room {
})
}
+ #[cfg(target_os = "linux")]
+ pub fn share_screen_wayland(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
+ log::info!("will screenshare on wayland");
+ if self.status.is_offline() {
+ return Task::ready(Err(anyhow!("room is offline")));
+ }
+ if self.is_sharing_screen() {
+ return Task::ready(Err(anyhow!("screen was already shared")));
+ }
+
+ let (participant, publish_id) = if let Some(live_kit) = self.live_kit.as_mut() {
+ let publish_id = post_inc(&mut live_kit.next_publish_id);
+ live_kit.screen_track = LocalTrack::Pending { publish_id };
+ cx.notify();
+ (live_kit.room.local_participant(), publish_id)
+ } else {
+ return Task::ready(Err(anyhow!("live-kit was not initialized")));
+ };
+
+ cx.spawn(async move |this, cx| {
+ let publication = participant.publish_screenshare_track_wayland(cx).await;
+
+ this.update(cx, |this, cx| {
+ let live_kit = this
+ .live_kit
+ .as_mut()
+ .context("live-kit was not initialized")?;
+
+ let canceled = if let LocalTrack::Pending {
+ publish_id: cur_publish_id,
+ } = &live_kit.screen_track
+ {
+ *cur_publish_id != publish_id
+ } else {
+ true
+ };
+
+ match publication {
+ Ok((publication, stream, failure_rx)) => {
+ if canceled {
+ cx.spawn(async move |_, cx| {
+ participant.unpublish_track(publication.sid(), cx).await
+ })
+ .detach()
+ } else {
+ cx.spawn(async move |this, cx| {
+ if failure_rx.await.is_ok() {
+ log::warn!("Wayland capture died, auto-unsharing screen");
+ let _ =
+ this.update(cx, |this, cx| this.unshare_screen(false, cx));
+ }
+ })
+ .detach();
+
+ live_kit.screen_track = LocalTrack::Published {
+ track_publication: publication,
+ _stream: stream,
+ };
+ cx.notify();
+
+ Audio::play_sound(Sound::StartScreenshare, cx);
+ }
+ Ok(())
+ }
+ Err(error) => {
+ if canceled {
+ Ok(())
+ } else {
+ live_kit.screen_track = LocalTrack::None;
+ cx.notify();
+ Err(error)
+ }
+ }
+ }
+ })?
+ })
+ }
+
pub fn toggle_mute(&mut self, cx: &mut Context<Self>) {
if let Some(live_kit) = self.live_kit.as_mut() {
// When unmuting, undeafen if the user was deafened before.
@@ -1623,6 +1744,7 @@ fn spawn_room_connection(
livekit::Room::connect(connection_info.server_url, connection_info.token, cx)
.await?;
+ let weak_room = this.clone();
this.update(cx, |this, cx| {
let _handle_updates = cx.spawn(async move |this, cx| {
while let Some(event) = events.next().await {
@@ -1642,12 +1764,14 @@ fn spawn_room_connection(
room: Rc::new(room),
screen_track: LocalTrack::None,
microphone_track: LocalTrack::None,
+ input_lag_us: None,
next_publish_id: 0,
muted_by_user,
deafened: false,
speaking: false,
_handle_updates,
});
+ this.diagnostics = Some(cx.new(|cx| CallDiagnostics::new(weak_room, cx)));
if !muted_by_user && this.can_use_microphone() {
this.share_microphone(cx)
@@ -1665,6 +1789,9 @@ struct LiveKitRoom {
room: Rc<livekit::Room>,
screen_track: LocalTrack<dyn ScreenCaptureStream>,
microphone_track: LocalTrack<AudioStream>,
+ /// Shared atomic storing the most recent input lag measurement in microseconds.
+ /// Written by the audio capture/transmit pipeline, read here for diagnostics.
+ input_lag_us: Option<Arc<AtomicU64>>,
/// Tracks whether we're currently in a muted state due to auto-mute from deafening or manual mute performed by user.
muted_by_user: bool,
deafened: bool,
@@ -1681,6 +1808,7 @@ impl LiveKitRoom {
} = mem::replace(&mut self.microphone_track, LocalTrack::None)
{
tracks_to_unpublish.push(track_publication.sid());
+ self.input_lag_us = None;
cx.notify();
}
@@ -1388,7 +1388,11 @@ impl Client {
// Start an HTTP server to receive the redirect from Zed's sign-in page.
let server = tiny_http::Server::http("127.0.0.1:0")
.map_err(|e| anyhow!(e).context("failed to bind callback port"))?;
- let port = server.server_addr().port();
+ let port = server
+ .server_addr()
+ .to_ip()
+ .context("server not bound to a TCP address")?
+ .port();
// Open the Zed sign-in page in the user's browser, with query parameters that indicate
// that the user is signing in from a Zed app running on the same device.
@@ -129,7 +129,7 @@ pub fn os_version() -> String {
{
use objc2_foundation::NSProcessInfo;
let process_info = NSProcessInfo::processInfo();
- let version_nsstring = unsafe { process_info.operatingSystemVersionString() };
+ let version_nsstring = process_info.operatingSystemVersionString();
// "Version 15.6.1 (Build 24G90)" -> "15.6.1 (Build 24G90)"
let version_string = version_nsstring.to_string().replace("Version ", "");
// "15.6.1 (Build 24G90)" -> "15.6.1"
@@ -111,7 +111,8 @@ pub struct PredictEditsBody {
pub trigger: PredictEditsRequestTrigger,
}
-#[derive(Default, Debug, Clone, Copy, Serialize, Deserialize)]
+#[derive(Default, Debug, Clone, Copy, Serialize, Deserialize, strum::AsRefStr)]
+#[strum(serialize_all = "snake_case")]
pub enum PredictEditsRequestTrigger {
Testing,
Diagnostics,
@@ -170,7 +171,10 @@ pub struct EditPredictionRejection {
pub e2e_latency_ms: Option<u128>,
}
-#[derive(Default, Debug, Clone, Copy, Serialize, Deserialize, PartialEq)]
+#[derive(
+ Default, Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash, strum::AsRefStr,
+)]
+#[strum(serialize_all = "snake_case")]
pub enum EditPredictionRejectReason {
/// New requests were triggered before this one completed
Canceled,
@@ -215,7 +215,7 @@ async fn test_remote_git_worktrees(
repo_b.update(cx, |repository, _| {
repository.create_worktree(
"feature-branch".to_string(),
- worktree_directory.clone(),
+ worktree_directory.join("feature-branch"),
Some("abc123".to_string()),
)
})
@@ -235,7 +235,10 @@ async fn test_remote_git_worktrees(
assert_eq!(worktrees.len(), 2);
assert_eq!(worktrees[0].path, PathBuf::from(path!("/project")));
assert_eq!(worktrees[1].path, worktree_directory.join("feature-branch"));
- assert_eq!(worktrees[1].ref_name.as_ref(), "refs/heads/feature-branch");
+ assert_eq!(
+ worktrees[1].ref_name,
+ Some("refs/heads/feature-branch".into())
+ );
assert_eq!(worktrees[1].sha.as_ref(), "abc123");
// Verify from the host side that the worktree was actually created
@@ -266,7 +269,7 @@ async fn test_remote_git_worktrees(
repo_b.update(cx, |repository, _| {
repository.create_worktree(
"bugfix-branch".to_string(),
- worktree_directory.clone(),
+ worktree_directory.join("bugfix-branch"),
None,
)
})
@@ -287,7 +290,7 @@ async fn test_remote_git_worktrees(
let feature_worktree = worktrees
.iter()
- .find(|worktree| worktree.ref_name.as_ref() == "refs/heads/feature-branch")
+ .find(|worktree| worktree.ref_name == Some("refs/heads/feature-branch".into()))
.expect("should find feature-branch worktree");
assert_eq!(
feature_worktree.path,
@@ -296,7 +299,7 @@ async fn test_remote_git_worktrees(
let bugfix_worktree = worktrees
.iter()
- .find(|worktree| worktree.ref_name.as_ref() == "refs/heads/bugfix-branch")
+ .find(|worktree| worktree.ref_name == Some("refs/heads/bugfix-branch".into()))
.expect("should find bugfix-branch worktree");
assert_eq!(
bugfix_worktree.path,
@@ -396,17 +399,17 @@ async fn test_linked_worktrees_sync(
.with_git_state(Path::new(path!("/project/.git")), true, |state| {
state.worktrees.push(GitWorktree {
path: PathBuf::from(path!("/project")),
- ref_name: "refs/heads/main".into(),
+ ref_name: Some("refs/heads/main".into()),
sha: "aaa111".into(),
});
state.worktrees.push(GitWorktree {
path: PathBuf::from(path!("/project/feature-branch")),
- ref_name: "refs/heads/feature-branch".into(),
+ ref_name: Some("refs/heads/feature-branch".into()),
sha: "bbb222".into(),
});
state.worktrees.push(GitWorktree {
path: PathBuf::from(path!("/project/bugfix-branch")),
- ref_name: "refs/heads/bugfix-branch".into(),
+ ref_name: Some("refs/heads/bugfix-branch".into()),
sha: "ccc333".into(),
});
})
@@ -434,15 +437,18 @@ async fn test_linked_worktrees_sync(
PathBuf::from(path!("/project/feature-branch"))
);
assert_eq!(
- host_linked[0].ref_name.as_ref(),
- "refs/heads/feature-branch"
+ host_linked[0].ref_name,
+ Some("refs/heads/feature-branch".into())
);
assert_eq!(host_linked[0].sha.as_ref(), "bbb222");
assert_eq!(
host_linked[1].path,
PathBuf::from(path!("/project/bugfix-branch"))
);
- assert_eq!(host_linked[1].ref_name.as_ref(), "refs/heads/bugfix-branch");
+ assert_eq!(
+ host_linked[1].ref_name,
+ Some("refs/heads/bugfix-branch".into())
+ );
assert_eq!(host_linked[1].sha.as_ref(), "ccc333");
// Share the project and have client B join.
@@ -472,7 +478,7 @@ async fn test_linked_worktrees_sync(
.with_git_state(Path::new(path!("/project/.git")), true, |state| {
state.worktrees.push(GitWorktree {
path: PathBuf::from(path!("/project/hotfix-branch")),
- ref_name: "refs/heads/hotfix-branch".into(),
+ ref_name: Some("refs/heads/hotfix-branch".into()),
sha: "ddd444".into(),
});
})
@@ -514,7 +520,7 @@ async fn test_linked_worktrees_sync(
.with_git_state(Path::new(path!("/project/.git")), true, |state| {
state
.worktrees
- .retain(|wt| wt.ref_name.as_ref() != "refs/heads/bugfix-branch");
+ .retain(|wt| wt.ref_name != Some("refs/heads/bugfix-branch".into()));
})
.unwrap();
@@ -534,7 +540,7 @@ async fn test_linked_worktrees_sync(
assert!(
host_linked_after_removal
.iter()
- .all(|wt| wt.ref_name.as_ref() != "refs/heads/bugfix-branch"),
+ .all(|wt| wt.ref_name != Some("refs/heads/bugfix-branch".into())),
"bugfix-branch should have been removed"
);
@@ -1787,6 +1787,7 @@ async fn test_project_reconnect(
// While disconnected, close project 3
cx_a.update(|_| drop(project_a3));
+ executor.run_until_parked();
// Client B reconnects. They re-join the room and the remaining shared project.
server.allow_connections();
@@ -6595,6 +6596,151 @@ async fn test_join_call_after_screen_was_shared(
});
}
+#[cfg(target_os = "linux")]
+#[gpui::test(iterations = 10)]
+async fn test_share_screen_wayland(
+ executor: BackgroundExecutor,
+ cx_a: &mut TestAppContext,
+ cx_b: &mut TestAppContext,
+) {
+ let mut server = TestServer::start(executor.clone()).await;
+
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
+ server
+ .make_contacts(&mut [(&client_a, cx_a), (&client_b, cx_b)])
+ .await;
+
+ let active_call_a = cx_a.read(ActiveCall::global);
+ let active_call_b = cx_b.read(ActiveCall::global);
+
+ // User A calls user B.
+ active_call_a
+ .update(cx_a, |call, cx| {
+ call.invite(client_b.user_id().unwrap(), None, cx)
+ })
+ .await
+ .unwrap();
+
+ // User B accepts.
+ let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming());
+ executor.run_until_parked();
+ incoming_call_b.next().await.unwrap().unwrap();
+ active_call_b
+ .update(cx_b, |call, cx| call.accept_incoming(cx))
+ .await
+ .unwrap();
+
+ let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
+ let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone());
+ executor.run_until_parked();
+
+ // User A shares their screen via the Wayland path.
+ let events_b = active_call_events(cx_b);
+ active_call_a
+ .update(cx_a, |call, cx| {
+ call.room()
+ .unwrap()
+ .update(cx, |room, cx| room.share_screen_wayland(cx))
+ })
+ .await
+ .unwrap();
+
+ executor.run_until_parked();
+
+ // Room A is sharing and has a nonzero synthetic screen ID.
+ room_a.read_with(cx_a, |room, _| {
+ assert!(room.is_sharing_screen());
+ let screen_id = room.shared_screen_id();
+ assert!(screen_id.is_some(), "shared_screen_id should be Some");
+ assert_ne!(screen_id.unwrap(), 0, "synthetic ID must be nonzero");
+ });
+
+ // User B observes the remote screen sharing track.
+ assert_eq!(events_b.borrow().len(), 1);
+ if let call::room::Event::RemoteVideoTracksChanged { participant_id } =
+ events_b.borrow().first().unwrap()
+ {
+ assert_eq!(*participant_id, client_a.peer_id().unwrap());
+ room_b.read_with(cx_b, |room, _| {
+ assert_eq!(
+ room.remote_participants()[&client_a.user_id().unwrap()]
+ .video_tracks
+ .len(),
+ 1
+ );
+ });
+ } else {
+ panic!("expected RemoteVideoTracksChanged event");
+ }
+}
+
+#[cfg(target_os = "linux")]
+#[gpui::test(iterations = 10)]
+async fn test_unshare_screen_wayland(
+ executor: BackgroundExecutor,
+ cx_a: &mut TestAppContext,
+ cx_b: &mut TestAppContext,
+) {
+ let mut server = TestServer::start(executor.clone()).await;
+
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
+ server
+ .make_contacts(&mut [(&client_a, cx_a), (&client_b, cx_b)])
+ .await;
+
+ let active_call_a = cx_a.read(ActiveCall::global);
+ let active_call_b = cx_b.read(ActiveCall::global);
+
+ // User A calls user B.
+ active_call_a
+ .update(cx_a, |call, cx| {
+ call.invite(client_b.user_id().unwrap(), None, cx)
+ })
+ .await
+ .unwrap();
+
+ // User B accepts.
+ let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming());
+ executor.run_until_parked();
+ incoming_call_b.next().await.unwrap().unwrap();
+ active_call_b
+ .update(cx_b, |call, cx| call.accept_incoming(cx))
+ .await
+ .unwrap();
+
+ let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
+ executor.run_until_parked();
+
+ // User A shares their screen via the Wayland path.
+ active_call_a
+ .update(cx_a, |call, cx| {
+ call.room()
+ .unwrap()
+ .update(cx, |room, cx| room.share_screen_wayland(cx))
+ })
+ .await
+ .unwrap();
+ executor.run_until_parked();
+
+ room_a.read_with(cx_a, |room, _| {
+ assert!(room.is_sharing_screen());
+ });
+
+ // User A stops sharing.
+ room_a
+ .update(cx_a, |room, cx| room.unshare_screen(true, cx))
+ .unwrap();
+ executor.run_until_parked();
+
+ // Room A is no longer sharing, screen ID is gone.
+ room_a.read_with(cx_a, |room, _| {
+ assert!(!room.is_sharing_screen());
+ assert!(room.shared_screen_id().is_none());
+ });
+}
+
#[gpui::test]
async fn test_right_click_menu_behind_collab_panel(cx: &mut TestAppContext) {
let mut server = TestServer::start(cx.executor().clone()).await;
@@ -473,7 +473,7 @@ async fn test_ssh_collaboration_git_worktrees(
repo_b.update(cx, |repo, _| {
repo.create_worktree(
"feature-branch".to_string(),
- worktree_directory.clone(),
+ worktree_directory.join("feature-branch"),
Some("abc123".to_string()),
)
})
@@ -491,7 +491,10 @@ async fn test_ssh_collaboration_git_worktrees(
.unwrap();
assert_eq!(worktrees.len(), 2);
assert_eq!(worktrees[1].path, worktree_directory.join("feature-branch"));
- assert_eq!(worktrees[1].ref_name.as_ref(), "refs/heads/feature-branch");
+ assert_eq!(
+ worktrees[1].ref_name,
+ Some("refs/heads/feature-branch".into())
+ );
assert_eq!(worktrees[1].sha.as_ref(), "abc123");
let server_worktrees = {
@@ -40,6 +40,7 @@ editor.workspace = true
futures.workspace = true
fuzzy.workspace = true
gpui.workspace = true
+livekit_client.workspace = true
log.workspace = true
menu.workspace = true
notifications.workspace = true
@@ -59,6 +60,7 @@ title_bar.workspace = true
ui.workspace = true
util.workspace = true
workspace.workspace = true
+zed_actions.workspace = true
[dev-dependencies]
call = { workspace = true, features = ["test-support"] }
@@ -0,0 +1,270 @@
+use call::{ActiveCall, Room, room};
+use gpui::{
+ DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, FontWeight, Render, Subscription,
+ Window,
+};
+use livekit_client::ConnectionQuality;
+use ui::prelude::*;
+use workspace::{ModalView, Workspace};
+use zed_actions::ShowCallStats;
+
+/// Registers the `ShowCallStats` action on every newly-created workspace so the
+/// user can open the call diagnostics modal. Safe to call once at app startup.
+pub fn init(cx: &mut App) {
+    cx.observe_new(|workspace: &mut Workspace, _, _cx| {
+        workspace.register_action(|workspace, _: &ShowCallStats, window, cx| {
+            workspace.toggle_modal(window, cx, |_window, cx| CallStatsModal::new(cx));
+        });
+    })
+    .detach();
+}
+
+pub struct CallStatsModal {
+ focus_handle: FocusHandle,
+ _active_call_subscription: Option<Subscription>,
+ _diagnostics_subscription: Option<Subscription>,
+}
+
+impl CallStatsModal {
+    /// Builds the modal. If a call is already active, subscribes to call events
+    /// and (when available) to the room's diagnostics entity so the stats
+    /// re-render as measurements arrive.
+    fn new(cx: &mut Context<Self>) -> Self {
+        let mut this = Self {
+            focus_handle: cx.focus_handle(),
+            _active_call_subscription: None,
+            _diagnostics_subscription: None,
+        };
+
+        if let Some(active_call) = ActiveCall::try_global(cx) {
+            this._active_call_subscription =
+                Some(cx.subscribe(&active_call, Self::handle_call_event));
+            this.observe_diagnostics(cx);
+        }
+
+        this
+    }
+
+    /// (Re)subscribes to the active room's diagnostics entity, or drops the
+    /// subscription when there is no room or the room exposes no diagnostics.
+    /// Each diagnostics change simply triggers a re-render via `cx.notify()`.
+    fn observe_diagnostics(&mut self, cx: &mut Context<Self>) {
+        let diagnostics = active_room(cx).and_then(|room| room.read(cx).diagnostics().cloned());
+
+        if let Some(diagnostics) = diagnostics {
+            self._diagnostics_subscription = Some(cx.observe(&diagnostics, |_, _, cx| cx.notify()));
+        } else {
+            self._diagnostics_subscription = None;
+        }
+    }
+
+    /// Reacts to room lifecycle events: re-attach to diagnostics on join, and
+    /// drop the subscription (and re-render the "Not in a call" state) on leave.
+    /// All other room events are intentionally ignored.
+    // NOTE(review): `RoomJoined` does not call `cx.notify()` directly; the render
+    // refresh presumably comes from the new diagnostics subscription — confirm.
+    fn handle_call_event(
+        &mut self,
+        _: Entity<ActiveCall>,
+        event: &room::Event,
+        cx: &mut Context<Self>,
+    ) {
+        match event {
+            room::Event::RoomJoined { .. } => {
+                self.observe_diagnostics(cx);
+            }
+            room::Event::RoomLeft { .. } => {
+                self._diagnostics_subscription = None;
+                cx.notify();
+            }
+            _ => {}
+        }
+    }
+
+    /// Closes the modal in response to `menu::Cancel` (e.g. pressing Escape).
+    fn dismiss(&mut self, _: &menu::Cancel, _: &mut Window, cx: &mut Context<Self>) {
+        cx.emit(DismissEvent);
+    }
+}
+
+/// Returns the room of the current active call, or `None` when there is no
+/// active call or the call has no joined room.
+fn active_room(cx: &App) -> Option<Entity<Room>> {
+    ActiveCall::try_global(cx)?.read(cx).room().cloned()
+}
+
+/// Maps the LiveKit connection quality to a display label and color.
+/// `None` (quality not yet reported) renders as a muted em-dash placeholder.
+fn quality_label(quality: Option<ConnectionQuality>) -> (&'static str, Color) {
+    match quality {
+        Some(ConnectionQuality::Excellent) => ("Excellent", Color::Success),
+        Some(ConnectionQuality::Good) => ("Good", Color::Success),
+        Some(ConnectionQuality::Poor) => ("Poor", Color::Warning),
+        Some(ConnectionQuality::Lost) => ("Lost", Color::Error),
+        None => ("—", Color::Muted),
+    }
+}
+
+/// Rates a millisecond-valued network metric as Normal/High/Poor with a color.
+/// Thresholds: Latency — <100ms Normal, <300ms High, else Poor;
+/// Jitter — <30ms Normal, <75ms High, else Poor.
+// NOTE(review): dispatch is stringly-typed on the display label; any label other
+// than "Latency"/"Jitter" silently rates ("Normal", Success). Consider an enum
+// so a new metric can't accidentally fall into the catch-all.
+fn metric_rating(label: &str, value_ms: f64) -> (&'static str, Color) {
+    match label {
+        "Latency" => {
+            if value_ms < 100.0 {
+                ("Normal", Color::Success)
+            } else if value_ms < 300.0 {
+                ("High", Color::Warning)
+            } else {
+                ("Poor", Color::Error)
+            }
+        }
+        "Jitter" => {
+            if value_ms < 30.0 {
+                ("Normal", Color::Success)
+            } else if value_ms < 75.0 {
+                ("High", Color::Warning)
+            } else {
+                ("Poor", Color::Error)
+            }
+        }
+        _ => ("Normal", Color::Success),
+    }
+}
+
+/// Rates capture-to-WebRTC input lag (milliseconds):
+/// <20ms Normal, <50ms High, otherwise Poor.
+fn input_lag_rating(value_ms: f64) -> (&'static str, Color) {
+    if value_ms < 20.0 {
+        ("Normal", Color::Success)
+    } else if value_ms < 50.0 {
+        ("High", Color::Warning)
+    } else {
+        ("Poor", Color::Error)
+    }
+}
+
+/// Rates packet loss (percentage, 0–100 scale):
+/// <1% Normal, <5% High, otherwise Poor.
+fn packet_loss_rating(loss_pct: f64) -> (&'static str, Color) {
+    if loss_pct < 1.0 {
+        ("Normal", Color::Success)
+    } else if loss_pct < 5.0 {
+        ("High", Color::Warning)
+    } else {
+        ("Poor", Color::Error)
+    }
+}
+
+impl EventEmitter<DismissEvent> for CallStatsModal {}
+impl ModalView for CallStatsModal {}
+
+impl Focusable for CallStatsModal {
+ fn focus_handle(&self, _cx: &App) -> FocusHandle {
+ self.focus_handle.clone()
+ }
+}
+
+impl Render for CallStatsModal {
+ fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ let room = active_room(cx);
+ let is_connected = room.is_some();
+ let stats = room
+ .and_then(|room| {
+ let diagnostics = room.read(cx).diagnostics()?;
+ Some(diagnostics.read(cx).stats().clone())
+ })
+ .unwrap_or_default();
+
+ let (quality_text, quality_color) = quality_label(stats.connection_quality);
+
+ v_flex()
+ .key_context("CallStatsModal")
+ .on_action(cx.listener(Self::dismiss))
+ .track_focus(&self.focus_handle)
+ .elevation_3(cx)
+ .w(rems(24.))
+ .p_4()
+ .gap_3()
+ .child(
+ h_flex()
+ .justify_between()
+ .child(Label::new("Call Diagnostics").size(LabelSize::Large))
+ .child(
+ Label::new(quality_text)
+ .size(LabelSize::Large)
+ .color(quality_color),
+ ),
+ )
+ .when(!is_connected, |this| {
+ this.child(
+ h_flex()
+ .justify_center()
+ .py_4()
+ .child(Label::new("Not in a call").color(Color::Muted)),
+ )
+ })
+ .when(is_connected, |this| {
+ this.child(
+ v_flex()
+ .gap_1()
+ .child(
+ h_flex()
+ .gap_2()
+ .child(Label::new("Network").weight(FontWeight::SEMIBOLD)),
+ )
+ .child(self.render_metric_row(
+ "Latency",
+ "Time for data to travel to the server",
+ stats.latency_ms,
+ |v| format!("{:.0}ms", v),
+ |v| metric_rating("Latency", v),
+ ))
+ .child(self.render_metric_row(
+ "Jitter",
+ "Variance or fluctuation in latency",
+ stats.jitter_ms,
+ |v| format!("{:.0}ms", v),
+ |v| metric_rating("Jitter", v),
+ ))
+ .child(self.render_metric_row(
+ "Packet loss",
+ "Amount of data lost during transfer",
+ stats.packet_loss_pct,
+ |v| format!("{:.1}%", v),
+ |v| packet_loss_rating(v),
+ ))
+ .child(self.render_metric_row(
+ "Input lag",
+ "Delay from audio capture to WebRTC",
+ stats.input_lag.map(|d| d.as_secs_f64() * 1000.0),
+ |v| format!("{:.1}ms", v),
+ |v| input_lag_rating(v),
+ )),
+ )
+ })
+ }
+}
+
+impl CallStatsModal {
+ fn render_metric_row(
+ &self,
+ title: &str,
+ description: &str,
+ value: Option<f64>,
+ format_value: impl Fn(f64) -> String,
+ rate: impl Fn(f64) -> (&'static str, Color),
+ ) -> impl IntoElement {
+ let (rating_text, rating_color, value_text) = match value {
+ Some(v) => {
+ let (rt, rc) = rate(v);
+ (rt, rc, format_value(v))
+ }
+ None => ("—", Color::Muted, "—".to_string()),
+ };
+
+ h_flex()
+ .px_2()
+ .py_1()
+ .rounded_md()
+ .justify_between()
+ .child(
+ v_flex()
+ .child(Label::new(title.to_string()).size(LabelSize::Default))
+ .child(
+ Label::new(description.to_string())
+ .size(LabelSize::Small)
+ .color(Color::Muted),
+ ),
+ )
+ .child(
+ v_flex()
+ .items_end()
+ .child(
+ Label::new(rating_text)
+ .size(LabelSize::Default)
+ .color(rating_color),
+ )
+ .child(
+ Label::new(value_text)
+ .size(LabelSize::Small)
+ .color(Color::Muted),
+ ),
+ )
+ }
+}
@@ -9,7 +9,7 @@ use channel::{Channel, ChannelEvent, ChannelStore};
use client::{ChannelId, Client, Contact, User, UserStore};
use collections::{HashMap, HashSet};
use contact_finder::ContactFinder;
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
use editor::{Editor, EditorElement, EditorStyle};
use fuzzy::{StringMatch, StringMatchCandidate, match_strings};
use gpui::{
@@ -171,6 +171,7 @@ pub fn init(cx: &mut App) {
});
});
});
+    // TODO(jk): Confirm whether the ScreenShare action is still reachable from
+    // any UI entry point (keybinding, command palette, title bar); if it is
+    // dead, remove this handler rather than shipping it untested.
workspace.register_action(|_, _: &ScreenShare, window, cx| {
let room = ActiveCall::global(cx).read(cx).room().cloned();
if let Some(room) = room {
@@ -179,19 +180,32 @@ pub fn init(cx: &mut App) {
if room.is_sharing_screen() {
room.unshare_screen(true, cx).ok();
} else {
- let sources = cx.screen_capture_sources();
-
- cx.spawn(async move |room, cx| {
- let sources = sources.await??;
- let first = sources.into_iter().next();
- if let Some(first) = first {
- room.update(cx, |room, cx| room.share_screen(first, cx))?
- .await
- } else {
- Ok(())
+ #[cfg(target_os = "linux")]
+ let is_wayland = gpui::guess_compositor() == "Wayland";
+ #[cfg(not(target_os = "linux"))]
+ let is_wayland = false;
+
+ #[cfg(target_os = "linux")]
+ {
+ if is_wayland {
+ room.share_screen_wayland(cx).detach_and_log_err(cx);
}
- })
- .detach_and_log_err(cx);
+ }
+ if !is_wayland {
+ let sources = cx.screen_capture_sources();
+
+ cx.spawn(async move |room, cx| {
+ let sources = sources.await??;
+ let first = sources.into_iter().next();
+ if let Some(first) = first {
+ room.update(cx, |room, cx| room.share_screen(first, cx))?
+ .await
+ } else {
+ Ok(())
+ }
+ })
+ .detach_and_log_err(cx);
+ }
};
});
});
@@ -429,16 +443,17 @@ impl CollabPanel {
.ok()
.flatten()
{
- Some(serialization_key) => cx
- .background_spawn(async move { KEY_VALUE_STORE.read_kvp(&serialization_key) })
- .await
- .context("reading collaboration panel from key value store")
- .log_err()
- .flatten()
- .map(|panel| serde_json::from_str::<SerializedCollabPanel>(&panel))
- .transpose()
- .log_err()
- .flatten(),
+ Some(serialization_key) => {
+ let kvp = cx.update(|_, cx| KeyValueStore::global(cx))?;
+ kvp.read_kvp(&serialization_key)
+ .context("reading collaboration panel from key value store")
+ .log_err()
+ .flatten()
+ .map(|panel| serde_json::from_str::<SerializedCollabPanel>(&panel))
+ .transpose()
+ .log_err()
+ .flatten()
+ }
None => None,
};
@@ -479,19 +494,19 @@ impl CollabPanel {
};
let width = self.width;
let collapsed_channels = self.collapsed_channels.clone();
+ let kvp = KeyValueStore::global(cx);
self.pending_serialization = cx.background_spawn(
async move {
- KEY_VALUE_STORE
- .write_kvp(
- serialization_key,
- serde_json::to_string(&SerializedCollabPanel {
- width,
- collapsed_channels: Some(
- collapsed_channels.iter().map(|cid| cid.0).collect(),
- ),
- })?,
- )
- .await?;
+ kvp.write_kvp(
+ serialization_key,
+ serde_json::to_string(&SerializedCollabPanel {
+ width,
+ collapsed_channels: Some(
+ collapsed_channels.iter().map(|cid| cid.0).collect(),
+ ),
+ })?,
+ )
+ .await?;
anyhow::Ok(())
}
.log_err(),
@@ -1,3 +1,4 @@
+mod call_stats_modal;
pub mod channel_view;
pub mod collab_panel;
pub mod notification_panel;
@@ -18,6 +19,7 @@ use workspace::AppState;
// Another comment, nice.
pub fn init(app_state: &Arc<AppState>, cx: &mut App) {
+ call_stats_modal::init(cx);
channel_view::init(cx);
collab_panel::init(cx);
notification_panel::init(cx);
@@ -3,7 +3,7 @@ use anyhow::Result;
use channel::ChannelStore;
use client::{ChannelId, Client, Notification, User, UserStore};
use collections::HashMap;
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
use futures::StreamExt;
use gpui::{
AnyElement, App, AsyncWindowContext, ClickEvent, Context, DismissEvent, Element, Entity,
@@ -186,16 +186,13 @@ impl NotificationPanel {
cx: AsyncWindowContext,
) -> Task<Result<Entity<Self>>> {
cx.spawn(async move |cx| {
- let serialized_panel = if let Some(panel) = cx
- .background_spawn(async move { KEY_VALUE_STORE.read_kvp(NOTIFICATION_PANEL_KEY) })
- .await
- .log_err()
- .flatten()
- {
- Some(serde_json::from_str::<SerializedNotificationPanel>(&panel)?)
- } else {
- None
- };
+ let kvp = cx.update(|_, cx| KeyValueStore::global(cx))?;
+ let serialized_panel =
+ if let Some(panel) = kvp.read_kvp(NOTIFICATION_PANEL_KEY).log_err().flatten() {
+ Some(serde_json::from_str::<SerializedNotificationPanel>(&panel)?)
+ } else {
+ None
+ };
workspace.update_in(cx, |workspace, window, cx| {
let panel = Self::new(workspace, window, cx);
@@ -212,14 +209,14 @@ impl NotificationPanel {
fn serialize(&mut self, cx: &mut Context<Self>) {
let width = self.width;
+ let kvp = KeyValueStore::global(cx);
self.pending_serialization = cx.background_spawn(
async move {
- KEY_VALUE_STORE
- .write_kvp(
- NOTIFICATION_PANEL_KEY.into(),
- serde_json::to_string(&SerializedNotificationPanel { width })?,
- )
- .await?;
+ kvp.write_kvp(
+ NOTIFICATION_PANEL_KEY.into(),
+ serde_json::to_string(&SerializedNotificationPanel { width })?,
+ )
+ .await?;
anyhow::Ok(())
}
.log_err(),
@@ -18,7 +18,7 @@ use gpui::{
Action, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable,
ParentElement, Render, Styled, Task, WeakEntity, Window,
};
-use persistence::COMMAND_PALETTE_HISTORY;
+use persistence::CommandPaletteDB;
use picker::Direction;
use picker::{Picker, PickerDelegate};
use postage::{sink::Sink, stream::Stream};
@@ -33,7 +33,11 @@ pub fn init(cx: &mut App) {
cx.observe_new(CommandPalette::register).detach();
}
-impl ModalView for CommandPalette {}
+impl ModalView for CommandPalette {
+ fn is_command_palette(&self) -> bool {
+ true
+ }
+}
pub struct CommandPalette {
picker: Entity<Picker<CommandPaletteDelegate>>,
@@ -180,9 +184,9 @@ struct QueryHistory {
}
impl QueryHistory {
- fn history(&mut self) -> &mut VecDeque<String> {
+ fn history(&mut self, cx: &App) -> &mut VecDeque<String> {
self.history.get_or_insert_with(|| {
- COMMAND_PALETTE_HISTORY
+ CommandPaletteDB::global(cx)
.list_recent_queries()
.unwrap_or_default()
.into_iter()
@@ -190,18 +194,18 @@ impl QueryHistory {
})
}
- fn add(&mut self, query: String) {
- if let Some(pos) = self.history().iter().position(|h| h == &query) {
- self.history().remove(pos);
+ fn add(&mut self, query: String, cx: &App) {
+ if let Some(pos) = self.history(cx).iter().position(|h| h == &query) {
+ self.history(cx).remove(pos);
}
- self.history().push_back(query);
+ self.history(cx).push_back(query);
self.cursor = None;
self.prefix = None;
}
- fn validate_cursor(&mut self, current_query: &str) -> Option<usize> {
+ fn validate_cursor(&mut self, current_query: &str, cx: &App) -> Option<usize> {
if let Some(pos) = self.cursor {
- if self.history().get(pos).map(|s| s.as_str()) != Some(current_query) {
+ if self.history(cx).get(pos).map(|s| s.as_str()) != Some(current_query) {
self.cursor = None;
self.prefix = None;
}
@@ -209,39 +213,39 @@ impl QueryHistory {
self.cursor
}
- fn previous(&mut self, current_query: &str) -> Option<&str> {
- if self.validate_cursor(current_query).is_none() {
+ fn previous(&mut self, current_query: &str, cx: &App) -> Option<&str> {
+ if self.validate_cursor(current_query, cx).is_none() {
self.prefix = Some(current_query.to_string());
}
let prefix = self.prefix.clone().unwrap_or_default();
- let start_index = self.cursor.unwrap_or(self.history().len());
+ let start_index = self.cursor.unwrap_or(self.history(cx).len());
for i in (0..start_index).rev() {
if self
- .history()
+ .history(cx)
.get(i)
.is_some_and(|e| e.starts_with(&prefix))
{
self.cursor = Some(i);
- return self.history().get(i).map(|s| s.as_str());
+ return self.history(cx).get(i).map(|s| s.as_str());
}
}
None
}
- fn next(&mut self, current_query: &str) -> Option<&str> {
- let selected = self.validate_cursor(current_query)?;
+ fn next(&mut self, current_query: &str, cx: &App) -> Option<&str> {
+ let selected = self.validate_cursor(current_query, cx)?;
let prefix = self.prefix.clone().unwrap_or_default();
- for i in (selected + 1)..self.history().len() {
+ for i in (selected + 1)..self.history(cx).len() {
if self
- .history()
+ .history(cx)
.get(i)
.is_some_and(|e| e.starts_with(&prefix))
{
self.cursor = Some(i);
- return self.history().get(i).map(|s| s.as_str());
+ return self.history(cx).get(i).map(|s| s.as_str());
}
}
None
@@ -338,8 +342,8 @@ impl CommandPaletteDelegate {
/// Hit count for each command in the palette.
/// We only account for commands triggered directly via command palette and not by e.g. keystrokes because
/// if a user already knows a keystroke for a command, they are unlikely to use a command palette to look for it.
- fn hit_counts(&self) -> HashMap<String, u16> {
- if let Ok(commands) = COMMAND_PALETTE_HISTORY.list_commands_used() {
+ fn hit_counts(&self, cx: &App) -> HashMap<String, u16> {
+ if let Ok(commands) = CommandPaletteDB::global(cx).list_commands_used() {
commands
.into_iter()
.map(|command| (command.command_name, command.invocations))
@@ -378,21 +382,25 @@ impl PickerDelegate for CommandPaletteDelegate {
direction: Direction,
query: &str,
_window: &mut Window,
- _cx: &mut App,
+ cx: &mut App,
) -> Option<String> {
match direction {
Direction::Up => {
let should_use_history =
self.selected_ix == 0 || self.query_history.is_navigating();
if should_use_history {
- if let Some(query) = self.query_history.previous(query).map(|s| s.to_string()) {
+ if let Some(query) = self
+ .query_history
+ .previous(query, cx)
+ .map(|s| s.to_string())
+ {
return Some(query);
}
}
}
Direction::Down => {
if self.query_history.is_navigating() {
- if let Some(query) = self.query_history.next(query).map(|s| s.to_string()) {
+ if let Some(query) = self.query_history.next(query, cx).map(|s| s.to_string()) {
return Some(query);
} else {
let prefix = self.query_history.prefix.take().unwrap_or_default();
@@ -444,7 +452,7 @@ impl PickerDelegate for CommandPaletteDelegate {
let task = cx.background_spawn({
let mut commands = self.all_commands.clone();
- let hit_counts = self.hit_counts();
+ let hit_counts = self.hit_counts(cx);
let executor = cx.background_executor().clone();
let query = normalize_action_query(query_str);
let query_for_link = query_str.to_string();
@@ -566,7 +574,7 @@ impl PickerDelegate for CommandPaletteDelegate {
}
if !self.latest_query.is_empty() {
- self.query_history.add(self.latest_query.clone());
+ self.query_history.add(self.latest_query.clone(), cx);
self.query_history.reset_cursor();
}
@@ -581,9 +589,9 @@ impl PickerDelegate for CommandPaletteDelegate {
self.commands.clear();
let command_name = command.name.clone();
let latest_query = self.latest_query.clone();
+ let db = CommandPaletteDB::global(cx);
cx.background_spawn(async move {
- COMMAND_PALETTE_HISTORY
- .write_command_invocation(command_name, latest_query)
+ db.write_command_invocation(command_name, latest_query)
.await
})
.detach_and_log_err(cx);
@@ -771,11 +779,9 @@ mod tests {
#[gpui::test]
async fn test_command_palette(cx: &mut TestAppContext) {
- persistence::COMMAND_PALETTE_HISTORY
- .clear_all()
- .await
- .unwrap();
let app_state = init_test(cx);
+ let db = cx.update(|cx| persistence::CommandPaletteDB::global(cx));
+ db.clear_all().await.unwrap();
let project = Project::test(app_state.fs.clone(), [], cx).await;
let (multi_workspace, cx) =
cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
@@ -69,7 +69,7 @@ impl Domain for CommandPaletteDB {
)];
}
-db::static_connection!(COMMAND_PALETTE_HISTORY, CommandPaletteDB, []);
+db::static_connection!(CommandPaletteDB, []);
impl CommandPaletteDB {
pub async fn write_command_invocation(
@@ -48,7 +48,10 @@ fn main() {
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
let workspace_store = cx.new(|cx| WorkspaceStore::new(client.clone(), cx));
let session_id = uuid::Uuid::new_v4().to_string();
- let session = cx.foreground_executor().block_on(Session::new(session_id));
+ let kvp = db::kvp::KeyValueStore::global(cx);
+ let session = cx
+ .foreground_executor()
+ .block_on(Session::new(session_id, kvp));
let session = cx.new(|cx| AppSession::new(session, cx));
let node_runtime = NodeRuntime::unavailable();
@@ -9,7 +9,7 @@ use gpui::{
use gpui::{ListState, ScrollHandle, ScrollStrategy, UniformListScrollHandle};
use language::LanguageRegistry;
use notifications::status_toast::{StatusToast, ToastIcon};
-use persistence::COMPONENT_PREVIEW_DB;
+use persistence::ComponentPreviewDb;
use project::Project;
use std::{iter::Iterator, ops::Range, sync::Arc};
use ui::{ButtonLike, Divider, HighlightedLabel, ListItem, ListSubHeader, Tooltip, prelude::*};
@@ -784,7 +784,7 @@ impl SerializableItem for ComponentPreview {
cx: &mut App,
) -> Task<anyhow::Result<Entity<Self>>> {
let deserialized_active_page =
- match COMPONENT_PREVIEW_DB.get_active_page(item_id, workspace_id) {
+ match ComponentPreviewDb::global(cx).get_active_page(item_id, workspace_id) {
Ok(page) => {
if let Some(page) = page {
ActivePageId(page)
@@ -845,7 +845,7 @@ impl SerializableItem for ComponentPreview {
alive_items,
workspace_id,
"component_previews",
- &COMPONENT_PREVIEW_DB,
+ &ComponentPreviewDb::global(cx),
cx,
)
}
@@ -860,9 +860,9 @@ impl SerializableItem for ComponentPreview {
) -> Option<Task<anyhow::Result<()>>> {
let active_page = self.active_page_id(cx);
let workspace_id = self.workspace_id?;
+ let db = ComponentPreviewDb::global(cx);
Some(cx.background_spawn(async move {
- COMPONENT_PREVIEW_DB
- .save_active_page(item_id, workspace_id, active_page.0)
+ db.save_active_page(item_id, workspace_id, active_page.0)
.await
}))
}
@@ -23,7 +23,7 @@ impl Domain for ComponentPreviewDb {
)];
}
-db::static_connection!(COMPONENT_PREVIEW_DB, ComponentPreviewDb, [WorkspaceDb]);
+db::static_connection!(ComponentPreviewDb, [WorkspaceDb]);
impl ComponentPreviewDb {
pub async fn save_active_page(
@@ -19,6 +19,7 @@ test-support = []
anyhow.workspace = true
gpui.workspace = true
indoc.workspace = true
+inventory.workspace = true
log.workspace = true
paths.workspace = true
release_channel.workspace = true
@@ -26,6 +27,7 @@ smol.workspace = true
sqlez.workspace = true
sqlez_macros.workspace = true
util.workspace = true
+uuid.workspace = true
zed_env_vars.workspace = true
[dev-dependencies]
@@ -4,12 +4,15 @@ pub mod query;
// Re-export
pub use anyhow;
use anyhow::Context as _;
-use gpui::{App, AppContext};
+pub use gpui;
+use gpui::{App, AppContext, Global};
pub use indoc::indoc;
+pub use inventory;
pub use paths::database_dir;
pub use smol;
pub use sqlez;
pub use sqlez_macros;
+pub use uuid;
pub use release_channel::RELEASE_CHANNEL;
use sqlez::domain::Migrator;
@@ -22,6 +25,103 @@ use std::sync::{LazyLock, atomic::Ordering};
use util::{ResultExt, maybe};
use zed_env_vars::ZED_STATELESS;
+/// A migration registered via `static_connection!` and collected at link time.
+pub struct DomainMigration {
+ pub name: &'static str,
+ pub migrations: &'static [&'static str],
+ pub dependencies: &'static [&'static str],
+ pub should_allow_migration_change: fn(usize, &str, &str) -> bool,
+}
+
+inventory::collect!(DomainMigration);
+
+/// The shared database connection backing all domain-specific DB wrappers.
+/// Set as a GPUI global per-App. Falls back to a shared LazyLock if not set.
+pub struct AppDatabase(pub ThreadSafeConnection);
+
+impl Global for AppDatabase {}
+
+/// Migrator that runs all inventory-registered domain migrations.
+pub struct AppMigrator;
+
+impl Migrator for AppMigrator {
+ fn migrate(connection: &sqlez::connection::Connection) -> anyhow::Result<()> {
+ let registrations: Vec<&DomainMigration> = inventory::iter::<DomainMigration>().collect();
+ let sorted = topological_sort(&registrations);
+ for reg in &sorted {
+ let mut should_allow = reg.should_allow_migration_change;
+ connection.migrate(reg.name, reg.migrations, &mut should_allow)?;
+ }
+ Ok(())
+ }
+}
+
+impl AppDatabase {
+ /// Opens the production database and runs all inventory-registered
+ /// migrations in dependency order.
+ pub fn new() -> Self {
+ let db_dir = database_dir();
+ let scope = RELEASE_CHANNEL.dev_name();
+ let connection = smol::block_on(open_db::<AppMigrator>(db_dir, scope));
+ Self(connection)
+ }
+
+ /// Creates a new in-memory database with a unique name and runs all
+ /// inventory-registered migrations in dependency order.
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn test_new() -> Self {
+ let name = format!("test-db-{}", uuid::Uuid::new_v4());
+ let connection = smol::block_on(open_test_db::<AppMigrator>(&name));
+ Self(connection)
+ }
+
+ /// Returns the per-App connection if set, otherwise falls back to
+ /// the shared LazyLock.
+ pub fn global(cx: &App) -> &ThreadSafeConnection {
+ #[allow(unreachable_code)]
+ if let Some(db) = cx.try_global::<Self>() {
+ return &db.0;
+ } else {
+ #[cfg(any(feature = "test-support", test))]
+ return &TEST_APP_DATABASE.0;
+
+ panic!("database not initialized")
+ }
+ }
+}
+
+fn topological_sort<'a>(registrations: &[&'a DomainMigration]) -> Vec<&'a DomainMigration> {
+ let mut sorted: Vec<&DomainMigration> = Vec::new();
+ let mut visited: std::collections::HashSet<&str> = std::collections::HashSet::new();
+
+ fn visit<'a>(
+ name: &str,
+ registrations: &[&'a DomainMigration],
+ sorted: &mut Vec<&'a DomainMigration>,
+ visited: &mut std::collections::HashSet<&'a str>,
+ ) {
+ if visited.contains(name) {
+ return;
+ }
+ if let Some(reg) = registrations.iter().find(|r| r.name == name) {
+ for dep in reg.dependencies {
+ visit(dep, registrations, sorted, visited);
+ }
+ visited.insert(reg.name);
+ sorted.push(reg);
+ }
+ }
+
+ for reg in registrations {
+ visit(reg.name, registrations, &mut sorted, &mut visited);
+ }
+ sorted
+}
+
+/// Shared fallback `AppDatabase` used when no per-App global is set.
+#[cfg(any(test, feature = "test-support"))]
+static TEST_APP_DATABASE: LazyLock<AppDatabase> = LazyLock::new(AppDatabase::test_new);
+
const CONNECTION_INITIALIZE_QUERY: &str = sql!(
PRAGMA foreign_keys=TRUE;
);
@@ -110,12 +210,11 @@ pub async fn open_test_db<M: Migrator>(db_name: &str) -> ThreadSafeConnection {
/// Implements a basic DB wrapper for a given domain
///
/// Arguments:
-/// - static variable name for connection
/// - type of connection wrapper
/// - dependencies, whose migrations should be run prior to this domain's migrations
#[macro_export]
macro_rules! static_connection {
- ($id:ident, $t:ident, [ $($d:ty),* ] $(, $global:ident)?) => {
+ ($t:ident, [ $($d:ty),* ]) => {
impl ::std::ops::Deref for $t {
type Target = $crate::sqlez::thread_safe_connection::ThreadSafeConnection;
@@ -124,30 +223,33 @@ macro_rules! static_connection {
}
}
+ impl ::std::clone::Clone for $t {
+ fn clone(&self) -> Self {
+ $t(self.0.clone())
+ }
+ }
+
impl $t {
+ /// Returns an instance backed by the per-App database if set,
+ /// or the shared fallback connection otherwise.
+ pub fn global(cx: &$crate::gpui::App) -> Self {
+ $t($crate::AppDatabase::global(cx).clone())
+ }
+
#[cfg(any(test, feature = "test-support"))]
pub async fn open_test_db(name: &'static str) -> Self {
$t($crate::open_test_db::<$t>(name).await)
}
}
- #[cfg(any(test, feature = "test-support"))]
- pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| {
- #[allow(unused_parens)]
- $t($crate::smol::block_on($crate::open_test_db::<($($d,)* $t)>(stringify!($id))))
- });
-
- #[cfg(not(any(test, feature = "test-support")))]
- pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| {
- let db_dir = $crate::database_dir();
- let scope = if false $(|| stringify!($global) == "global")? {
- "global"
- } else {
- $crate::RELEASE_CHANNEL.dev_name()
- };
- #[allow(unused_parens)]
- $t($crate::smol::block_on($crate::open_db::<($($d,)* $t)>(db_dir, scope)))
- });
+ $crate::inventory::submit! {
+ $crate::DomainMigration {
+ name: <$t as $crate::sqlez::domain::Domain>::NAME,
+ migrations: <$t as $crate::sqlez::domain::Domain>::MIGRATIONS,
+ dependencies: &[$(<$d as $crate::sqlez::domain::Domain>::NAME),*],
+ should_allow_migration_change: <$t as $crate::sqlez::domain::Domain>::should_allow_migration_change,
+ }
+ }
}
}
@@ -11,6 +11,12 @@ use crate::{
pub struct KeyValueStore(crate::sqlez::thread_safe_connection::ThreadSafeConnection);
+impl KeyValueStore {
+ pub fn from_app_db(db: &crate::AppDatabase) -> Self {
+ Self(db.0.clone())
+ }
+}
+
impl Domain for KeyValueStore {
const NAME: &str = stringify!(KeyValueStore);
@@ -32,26 +38,25 @@ impl Domain for KeyValueStore {
];
}
-crate::static_connection!(KEY_VALUE_STORE, KeyValueStore, []);
+crate::static_connection!(KeyValueStore, []);
pub trait Dismissable {
const KEY: &'static str;
- fn dismissed() -> bool {
- KEY_VALUE_STORE
+ fn dismissed(cx: &App) -> bool {
+ KeyValueStore::global(cx)
.read_kvp(Self::KEY)
.log_err()
.is_some_and(|s| s.is_some())
}
fn set_dismissed(is_dismissed: bool, cx: &mut App) {
+ let db = KeyValueStore::global(cx);
write_and_log(cx, move || async move {
if is_dismissed {
- KEY_VALUE_STORE
- .write_kvp(Self::KEY.into(), "1".into())
- .await
+ db.write_kvp(Self::KEY.into(), "1".into()).await
} else {
- KEY_VALUE_STORE.delete_kvp(Self::KEY.into()).await
+ db.delete_kvp(Self::KEY.into()).await
}
})
}
@@ -228,9 +233,26 @@ impl Domain for GlobalKeyValueStore {
)];
}
-crate::static_connection!(GLOBAL_KEY_VALUE_STORE, GlobalKeyValueStore, [], global);
+impl std::ops::Deref for GlobalKeyValueStore {
+ type Target = ThreadSafeConnection;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+static GLOBAL_KEY_VALUE_STORE: std::sync::LazyLock<GlobalKeyValueStore> =
+ std::sync::LazyLock::new(|| {
+ let db_dir = crate::database_dir();
+ GlobalKeyValueStore(smol::block_on(crate::open_db::<GlobalKeyValueStore>(
+ db_dir, "global",
+ )))
+ });
impl GlobalKeyValueStore {
+ pub fn global() -> &'static Self {
+ &GLOBAL_KEY_VALUE_STORE
+ }
+
query! {
pub fn read_kvp(key: &str) -> Result<Option<String>> {
SELECT value FROM kv_store WHERE key = (?)
@@ -1461,7 +1461,12 @@ async fn register_session_inner(
.detach();
})
.ok();
- let serialized_layout = persistence::get_serialized_layout(adapter_name).await;
+ let serialized_layout = this
+ .update(cx, |_, cx| {
+ persistence::get_serialized_layout(&adapter_name, &db::kvp::KeyValueStore::global(cx))
+ })
+ .ok()
+ .flatten();
let debug_session = this.update_in(cx, |this, window, cx| {
let parent_session = this
.sessions_with_children
@@ -1,7 +1,7 @@
use anyhow::Context as _;
use collections::HashMap;
use dap::{Capabilities, adapters::DebugAdapterName};
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
use gpui::{Axis, Context, Entity, EntityId, Focusable, Subscription, WeakEntity, Window};
use project::Project;
use serde::{Deserialize, Serialize};
@@ -125,15 +125,15 @@ const DEBUGGER_PANEL_PREFIX: &str = "debugger_panel_";
pub(crate) async fn serialize_pane_layout(
adapter_name: DebugAdapterName,
pane_group: SerializedLayout,
+ kvp: KeyValueStore,
) -> anyhow::Result<()> {
let serialized_pane_group = serde_json::to_string(&pane_group)
.context("Serializing pane group with serde_json as a string")?;
- KEY_VALUE_STORE
- .write_kvp(
- format!("{DEBUGGER_PANEL_PREFIX}-{adapter_name}"),
- serialized_pane_group,
- )
- .await
+ kvp.write_kvp(
+ format!("{DEBUGGER_PANEL_PREFIX}-{adapter_name}"),
+ serialized_pane_group,
+ )
+ .await
}
pub(crate) fn build_serialized_layout(
@@ -187,13 +187,13 @@ fn serialize_pane(pane: &Entity<Pane>, cx: &App) -> SerializedPane {
}
}
-pub(crate) async fn get_serialized_layout(
+pub(crate) fn get_serialized_layout(
adapter_name: impl AsRef<str>,
+ kvp: &KeyValueStore,
) -> Option<SerializedLayout> {
let key = format!("{DEBUGGER_PANEL_PREFIX}-{}", adapter_name.as_ref());
- KEY_VALUE_STORE
- .read_kvp(&key)
+ kvp.read_kvp(&key)
.log_err()
.flatten()
.and_then(|value| serde_json::from_str::<SerializedLayout>(&value).ok())
@@ -1313,6 +1313,7 @@ impl RunningState {
show_summary: false,
show_command: false,
show_rerun: false,
+ save: task::SaveStrategy::default(),
};
let workspace = self.workspace.clone();
@@ -1501,9 +1502,14 @@ impl RunningState {
return;
};
- persistence::serialize_pane_layout(adapter_name, pane_layout)
- .await
- .log_err();
+ let kvp = this
+ .read_with(cx, |_, cx| db::kvp::KeyValueStore::global(cx))
+ .ok();
+ if let Some(kvp) = kvp {
+ persistence::serialize_pane_layout(adapter_name, pane_layout, kvp)
+ .await
+ .log_err();
+ }
this.update(cx, |this, _| {
this._schedule_serialize.take();
@@ -6,7 +6,7 @@ use std::{
};
use dap::{Capabilities, ExceptionBreakpointsFilter, adapters::DebugAdapterName};
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
use editor::Editor;
use gpui::{
Action, AppContext, ClickEvent, Entity, FocusHandle, Focusable, MouseButton, ScrollStrategy,
@@ -520,8 +520,9 @@ impl BreakpointList {
});
let value = serde_json::to_string(&settings);
+ let kvp = KeyValueStore::global(cx);
cx.background_executor()
- .spawn(async move { KEY_VALUE_STORE.write_kvp(key, value?).await })
+ .spawn(async move { kvp.write_kvp(key, value?).await })
} else {
Task::ready(Result::Ok(()))
}
@@ -532,7 +533,7 @@ impl BreakpointList {
adapter_name: DebugAdapterName,
cx: &mut Context<Self>,
) -> anyhow::Result<()> {
- let Some(val) = KEY_VALUE_STORE.read_kvp(&Self::kvp_key(&adapter_name))? else {
+ let Some(val) = KeyValueStore::global(cx).read_kvp(&Self::kvp_key(&adapter_name))? else {
return Ok(());
};
let value: PersistedAdapterOptions = serde_json::from_str(&val)?;
@@ -303,7 +303,8 @@ impl Console {
}
fn previous_query(&mut self, _: &SelectPrevious, window: &mut Window, cx: &mut Context<Self>) {
- let prev = self.history.previous(&mut self.cursor);
+ let current_query = self.query_bar.read(cx).text(cx);
+ let prev = self.history.previous(&mut self.cursor, &current_query);
if let Some(prev) = prev {
self.query_bar.update(cx, |editor, cx| {
editor.set_text(prev, window, cx);
@@ -5,7 +5,7 @@ use std::time::Duration;
use anyhow::{Context as _, Result, anyhow};
use dap::StackFrameId;
use dap::adapters::DebugAdapterName;
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
use gpui::{
Action, AnyElement, Entity, EventEmitter, FocusHandle, Focusable, FontWeight, ListState,
Subscription, Task, WeakEntity, list,
@@ -122,7 +122,7 @@ impl StackFrameList {
.flatten()
.and_then(|database_id| {
let key = stack_frame_filter_key(&session.read(cx).adapter(), database_id);
- KEY_VALUE_STORE
+ KeyValueStore::global(cx)
.read_kvp(&key)
.ok()
.flatten()
@@ -852,8 +852,10 @@ impl StackFrameList {
.flatten()
{
let key = stack_frame_filter_key(&self.session.read(cx).adapter(), database_id);
- let save_task = KEY_VALUE_STORE.write_kvp(key, self.list_filter.into());
- cx.background_spawn(save_task).detach();
+ let kvp = KeyValueStore::global(cx);
+ let filter: String = self.list_filter.into();
+ cx.background_spawn(async move { kvp.write_kvp(key, filter).await })
+ .detach();
}
if let Some(ThreadStatus::Stopped) = thread_status {
@@ -9,7 +9,7 @@ use dap::{
StackFrame,
requests::{Scopes, StackTrace, Threads},
};
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
use editor::{Editor, ToPoint as _};
use gpui::{BackgroundExecutor, TestAppContext, VisualTestContext};
use project::{FakeFs, Project};
@@ -1217,7 +1217,10 @@ async fn test_stack_frame_filter_persistence(
.expect("workspace id has to be some for this test to work properly");
let key = stack_frame_filter_key(&adapter_name, workspace_id);
- let stored_value = KEY_VALUE_STORE.read_kvp(&key).unwrap();
+ let stored_value = cx
+ .update(|_, cx| KeyValueStore::global(cx))
+ .read_kvp(&key)
+ .unwrap();
assert_eq!(
stored_value,
Some(StackFrameFilter::OnlyUserFrames.into()),
@@ -17,7 +17,7 @@ cli-support = []
[dependencies]
ai_onboarding.workspace = true
anyhow.workspace = true
-arrayvec.workspace = true
+heapless.workspace = true
brotli.workspace = true
buffer_diff.workspace = true
client.workspace = true
@@ -1,5 +1,4 @@
use anyhow::Result;
-use arrayvec::ArrayVec;
use client::{Client, EditPredictionUsage, UserStore};
use cloud_api_types::{OrganizationId, SubmitEditPredictionFeedbackBody};
use cloud_llm_client::predict_edits_v3::{
@@ -12,7 +11,7 @@ use cloud_llm_client::{
};
use collections::{HashMap, HashSet};
use copilot::{Copilot, Reinstall, SignIn, SignOut};
-use db::kvp::{Dismissable, KEY_VALUE_STORE};
+use db::kvp::{Dismissable, KeyValueStore};
use edit_prediction_context::{RelatedExcerptStore, RelatedExcerptStoreEvent, RelatedFile};
use feature_flags::{FeatureFlag, FeatureFlagAppExt as _};
use futures::{
@@ -27,6 +26,7 @@ use gpui::{
http_client::{self, AsyncBody, Method},
prelude::*,
};
+use heapless::Vec as ArrayVec;
use language::language_settings::all_language_settings;
use language::{Anchor, Buffer, File, Point, TextBufferSnapshot, ToOffset, ToPoint};
use language::{BufferSnapshot, OffsetRangeExt};
@@ -102,6 +102,7 @@ actions!(
/// Maximum number of events to track.
const EVENT_COUNT_MAX: usize = 10;
const CHANGE_GROUPING_LINE_SPAN: u32 = 8;
+const EDIT_HISTORY_DIFF_SIZE_LIMIT: usize = 2048 * 3; // ~2048 tokens or ~50% of typical prompt budget
const COLLABORATOR_EDIT_LOCALITY_CONTEXT_TOKENS: usize = 512;
const LAST_CHANGE_GROUPING_TIME: Duration = Duration::from_secs(1);
const ZED_PREDICT_DATA_COLLECTION_CHOICE: &str = "zed_predict_data_collection_choice";
@@ -331,7 +332,7 @@ struct ProjectState {
registered_buffers: HashMap<gpui::EntityId, RegisteredBuffer>,
current_prediction: Option<CurrentEditPrediction>,
next_pending_prediction_id: usize,
- pending_predictions: ArrayVec<PendingPrediction, 2>,
+ pending_predictions: ArrayVec<PendingPrediction, 2, u8>,
debug_tx: Option<mpsc::UnboundedSender<DebugEvent>>,
last_edit_prediction_refresh: Option<(EntityId, Instant)>,
last_jump_prediction_refresh: Option<(EntityId, Instant)>,
@@ -724,6 +725,12 @@ fn compute_diff_between_snapshots_in_range(
let old_edit_range = old_start_line_offset..old_end_line_offset;
let new_edit_range = new_start_line_offset..new_end_line_offset;
+ if new_edit_range.len() > EDIT_HISTORY_DIFF_SIZE_LIMIT
+ || old_edit_range.len() > EDIT_HISTORY_DIFF_SIZE_LIMIT
+ {
+ return None;
+ }
+
let old_region_text: String = old_snapshot.text_for_range(old_edit_range).collect();
let new_region_text: String = new_snapshot.text_for_range(new_edit_range).collect();
@@ -770,7 +777,7 @@ impl EditPredictionStore {
}
pub fn new(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut Context<Self>) -> Self {
- let data_collection_choice = Self::load_data_collection_choice();
+ let data_collection_choice = Self::load_data_collection_choice(cx);
let llm_token = LlmApiToken::global(cx);
@@ -1410,8 +1417,24 @@ impl EditPredictionStore {
return;
}
+ let is_recordable_history_edit =
+ compute_diff_between_snapshots_in_range(&old_snapshot, &new_snapshot, &edit_range)
+ .is_some();
+
let events = &mut project_state.events;
+ if !is_recordable_history_edit {
+ if let Some(event) = project_state.last_event.take() {
+ if let Some(event) = event.finalize(&project_state.license_detection_watchers, cx) {
+ if events.len() + 1 >= EVENT_COUNT_MAX {
+ events.pop_front();
+ }
+ events.push_back(event);
+ }
+ }
+ return;
+ }
+
if let Some(last_event) = project_state.last_event.as_mut() {
let is_next_snapshot_of_same_buffer = old_snapshot.remote_id()
== last_event.new_snapshot.remote_id()
@@ -2151,11 +2174,12 @@ impl EditPredictionStore {
let project_state = this.get_or_init_project(&project, cx);
let throttle = *select_throttle(project_state, request_trigger);
+ let now = cx.background_executor().now();
throttle.and_then(|(last_entity, last_timestamp)| {
if throttle_entity != last_entity {
return None;
}
- (last_timestamp + throttle_timeout).checked_duration_since(Instant::now())
+ (last_timestamp + throttle_timeout).checked_duration_since(now)
})
})
.ok()
@@ -2183,7 +2207,7 @@ impl EditPredictionStore {
return;
}
- let new_refresh = (throttle_entity, Instant::now());
+ let new_refresh = (throttle_entity, cx.background_executor().now());
*select_throttle(project_state, request_trigger) = Some(new_refresh);
is_cancelled = false;
})
@@ -2287,18 +2311,24 @@ impl EditPredictionStore {
});
if project_state.pending_predictions.len() < max_pending_predictions {
- project_state.pending_predictions.push(PendingPrediction {
- id: pending_prediction_id,
- task,
- drop_on_cancel,
- });
+ project_state
+ .pending_predictions
+ .push(PendingPrediction {
+ id: pending_prediction_id,
+ task,
+ drop_on_cancel,
+ })
+ .unwrap();
} else {
let pending_prediction = project_state.pending_predictions.pop().unwrap();
- project_state.pending_predictions.push(PendingPrediction {
- id: pending_prediction_id,
- task,
- drop_on_cancel,
- });
+ project_state
+ .pending_predictions
+ .push(PendingPrediction {
+ id: pending_prediction_id,
+ task,
+ drop_on_cancel,
+ })
+ .unwrap();
project_state.cancel_pending_prediction(pending_prediction, cx);
}
}
@@ -2745,8 +2775,8 @@ impl EditPredictionStore {
self.data_collection_choice.is_enabled(cx)
}
- fn load_data_collection_choice() -> DataCollectionChoice {
- let choice = KEY_VALUE_STORE
+ fn load_data_collection_choice(cx: &App) -> DataCollectionChoice {
+ let choice = KeyValueStore::global(cx)
.read_kvp(ZED_PREDICT_DATA_COLLECTION_CHOICE)
.log_err()
.flatten();
@@ -2766,11 +2796,13 @@ impl EditPredictionStore {
self.data_collection_choice = self.data_collection_choice.toggle();
let new_choice = self.data_collection_choice;
let is_enabled = new_choice.is_enabled(cx);
- db::write_and_log(cx, move || {
- KEY_VALUE_STORE.write_kvp(
+ let kvp = KeyValueStore::global(cx);
+ db::write_and_log(cx, move || async move {
+ kvp.write_kvp(
ZED_PREDICT_DATA_COLLECTION_CHOICE.into(),
is_enabled.to_string(),
)
+ .await
});
}
@@ -3006,12 +3038,13 @@ struct ZedPredictUpsell;
impl Dismissable for ZedPredictUpsell {
const KEY: &'static str = "dismissed-edit-predict-upsell";
- fn dismissed() -> bool {
+ fn dismissed(cx: &App) -> bool {
// To make this backwards compatible with older versions of Zed, we
// check if the user has seen the previous Edit Prediction Onboarding
// before, by checking the data collection choice which was written to
// the database once the user clicked on "Accept and Enable"
- if KEY_VALUE_STORE
+ let kvp = KeyValueStore::global(cx);
+ if kvp
.read_kvp(ZED_PREDICT_DATA_COLLECTION_CHOICE)
.log_err()
.is_some_and(|s| s.is_some())
@@ -3019,15 +3052,14 @@ impl Dismissable for ZedPredictUpsell {
return true;
}
- KEY_VALUE_STORE
- .read_kvp(Self::KEY)
+ kvp.read_kvp(Self::KEY)
.log_err()
.is_some_and(|s| s.is_some())
}
}
-pub fn should_show_upsell_modal() -> bool {
- !ZedPredictUpsell::dismissed()
+pub fn should_show_upsell_modal(cx: &App) -> bool {
+ !ZedPredictUpsell::dismissed(cx)
}
pub fn init(cx: &mut App) {
@@ -1012,6 +1012,81 @@ async fn test_irrelevant_collaborator_edits_in_different_files_are_omitted_from_
assert!(events.is_empty());
}
+#[gpui::test]
+async fn test_large_edits_are_omitted_from_history(cx: &mut TestAppContext) {
+ let (ep_store, _requests) = init_test_with_fake_client(cx);
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ "/root",
+ json!({
+ "foo.rs": (0..20)
+ .map(|i| format!("line {i}\n"))
+ .collect::<String>()
+ }),
+ )
+ .await;
+ let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;
+
+ let buffer = project
+ .update(cx, |project, cx| {
+ let path = project.find_project_path(path!("root/foo.rs"), cx).unwrap();
+ project.set_active_path(Some(path.clone()), cx);
+ project.open_buffer(path, cx)
+ })
+ .await
+ .unwrap();
+
+ let cursor = buffer.read_with(cx, |buffer, _cx| buffer.anchor_before(Point::new(1, 0)));
+
+ ep_store.update(cx, |ep_store, cx| {
+ ep_store.register_buffer(&buffer, &project, cx);
+ let _ = ep_store.prediction_at(&buffer, Some(cursor), &project, cx);
+ });
+
+ buffer.update(cx, |buffer, cx| {
+ buffer.edit(vec![(0..6, "LOCAL ZERO")], None, cx);
+ });
+
+ let (collaborator, mut collaborator_version) = make_collaborator_replica(&buffer, cx);
+
+ let (line_three_start, line_three_len) = collaborator.read_with(cx, |buffer, _cx| {
+ (Point::new(3, 0).to_offset(buffer), buffer.line_len(3))
+ });
+ let large_edit = "X".repeat(EDIT_HISTORY_DIFF_SIZE_LIMIT + 1);
+
+ apply_collaborator_edit(
+ &collaborator,
+ &buffer,
+ &mut collaborator_version,
+ line_three_start..line_three_start + line_three_len as usize,
+ &large_edit,
+ cx,
+ )
+ .await;
+
+ buffer.update(cx, |buffer, cx| {
+ let line_seven_start = Point::new(7, 0).to_offset(buffer);
+ let line_seven_end = Point::new(7, 6).to_offset(buffer);
+ buffer.edit(
+ vec![(line_seven_start..line_seven_end, "LOCAL SEVEN")],
+ None,
+ cx,
+ );
+ });
+
+ let events = ep_store.update(cx, |ep_store, cx| {
+ ep_store.edit_history_for_project(&project, cx)
+ });
+
+ let rendered_events = render_events_with_predicted(&events);
+
+ assert_eq!(rendered_events.len(), 2);
+ assert!(rendered_events[0].contains("+LOCAL ZERO"));
+ assert!(!rendered_events[0].contains(&large_edit));
+ assert!(rendered_events[1].contains("+LOCAL SEVEN"));
+ assert!(!rendered_events[1].contains(&large_edit));
+}
+
#[gpui::test]
async fn test_predicted_flag_coalescing(cx: &mut TestAppContext) {
let (ep_store, _requests) = init_test_with_fake_client(cx);
@@ -1807,7 +1882,9 @@ async fn test_cancel_second_on_third_request(cx: &mut TestAppContext) {
reason: EditPredictionRejectReason::Replaced,
was_shown: false,
model_version: None,
- e2e_latency_ms: Some(0),
+ // 2 throttle waits (for 2nd and 3rd requests) elapsed
+ // between this request's start and response.
+ e2e_latency_ms: Some(2 * EditPredictionStore::THROTTLE_TIMEOUT.as_millis()),
}
]
);
@@ -26,6 +26,14 @@ pub fn encode_cursor_in_patch(patch: &str, cursor_offset: Option<usize>) -> Stri
let mut line_start_offset = 0usize;
for line in patch.lines() {
+ if matches!(
+ DiffLine::parse(line),
+ DiffLine::Garbage(content)
+ if content.starts_with('#') && content.contains(CURSOR_POSITION_MARKER)
+ ) {
+ continue;
+ }
+
if !result.is_empty() {
result.push('\n');
}
@@ -846,6 +854,31 @@ mod tests {
assert_eq!(results, vec![(clean_patch, None)]);
}
+ #[test]
+ fn test_encode_cursor_in_patch_is_idempotent() {
+ let patch = indoc! {r#"
+ --- a/test.rs
+ +++ b/test.rs
+ @@ -1,2 +1,2 @@
+ -fn old() {}
+ +fn new_name() {}
+ # ^[CURSOR_POSITION]
+ "#};
+
+ let cursor_offset = "fn new_name() {}".find("name").unwrap();
+ let encoded_once = encode_cursor_in_patch(patch, Some(cursor_offset));
+ let encoded_twice = encode_cursor_in_patch(&encoded_once, Some(cursor_offset));
+
+ assert_eq!(encoded_once, encoded_twice);
+ assert_eq!(
+ encoded_once
+ .lines()
+ .filter(|line| line.contains(CURSOR_POSITION_MARKER))
+ .count(),
+ 1
+ );
+ }
+
#[test]
fn test_from_markdown_accepted_prediction_marker() {
let markdown = indoc! {r#"
@@ -365,10 +365,10 @@ pub fn request_prediction_with_zeta(
});
cx.spawn(async move |this, cx| {
- let request_duration = cx.background_executor().now() - request_start;
let Some((id, prediction)) = handle_api_response(&this, request_task.await, cx)? else {
return Ok(None);
};
+ let request_duration = cx.background_executor().now() - request_start;
let Some(Prediction {
prompt_input: inputs,
@@ -21,6 +21,7 @@ clap = "4"
client.workspace = true
cloud_llm_client.workspace= true
collections.workspace = true
+db.workspace = true
debug_adapter_extension.workspace = true
dirs.workspace = true
extension.workspace = true
@@ -82,6 +82,10 @@ pub struct ExamplePrediction {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub error: Option<String>,
pub provider: PredictionProvider,
+ #[serde(default, skip_serializing_if = "Option::is_none")]
+ pub cumulative_logprob: Option<f64>,
+ #[serde(default, skip_serializing_if = "Option::is_none")]
+ pub avg_logprob: Option<f64>,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
@@ -166,6 +170,10 @@ pub struct ExampleScore {
pub inserted_tokens: usize,
#[serde(default)]
pub deleted_tokens: usize,
+ #[serde(default, skip_serializing_if = "Option::is_none")]
+ pub cumulative_logprob: Option<f64>,
+ #[serde(default, skip_serializing_if = "Option::is_none")]
+ pub avg_logprob: Option<f64>,
}
impl Example {
@@ -136,6 +136,48 @@ pub fn zeta2_output_for_patch(
},
)?;
+ if version == ZetaFormat::V0317SeedMultiRegions {
+ let cursor_in_new = cursor_offset.map(|cursor_offset| {
+ let hunk_start = first_hunk_offset.unwrap_or(0);
+ result.floor_char_boundary((hunk_start + cursor_offset).min(result.len()))
+ });
+ return multi_region::encode_from_old_and_new_v0317(
+ &old_editable_region,
+ &result,
+ cursor_in_new,
+ zeta_prompt::CURSOR_MARKER,
+ multi_region::V0317_END_MARKER,
+ );
+ }
+
+ if version == ZetaFormat::V0318SeedMultiRegions {
+ let cursor_in_new = cursor_offset.map(|cursor_offset| {
+ let hunk_start = first_hunk_offset.unwrap_or(0);
+ result.floor_char_boundary((hunk_start + cursor_offset).min(result.len()))
+ });
+ return multi_region::encode_from_old_and_new_v0318(
+ &old_editable_region,
+ &result,
+ cursor_in_new,
+ zeta_prompt::CURSOR_MARKER,
+ multi_region::V0318_END_MARKER,
+ );
+ }
+
+ if version == ZetaFormat::V0316SeedMultiRegions {
+ let cursor_in_new = cursor_offset.map(|cursor_offset| {
+ let hunk_start = first_hunk_offset.unwrap_or(0);
+ result.floor_char_boundary((hunk_start + cursor_offset).min(result.len()))
+ });
+ return multi_region::encode_from_old_and_new_v0316(
+ &old_editable_region,
+ &result,
+ cursor_in_new,
+ zeta_prompt::CURSOR_MARKER,
+ multi_region::V0316_END_MARKER,
+ );
+ }
+
if version == ZetaFormat::V0306SeedMultiRegions {
let cursor_in_new = cursor_offset.map(|cursor_offset| {
let hunk_start = first_hunk_offset.unwrap_or(0);
@@ -1,4 +1,5 @@
use client::{Client, ProxySettings, UserStore};
+use db::AppDatabase;
use extension::ExtensionHostProxy;
use fs::RealFs;
use gpui::http_client::read_proxy_from_env;
@@ -61,6 +62,9 @@ pub fn init(cx: &mut App) -> EpAppState {
let client = Client::production(cx);
cx.set_http_client(client.http_client());
+ let app_db = AppDatabase::new();
+ cx.set_global(app_db);
+
let git_binary_path = None;
let fs = Arc::new(RealFs::new(
git_binary_path,
@@ -263,6 +263,8 @@ pub async fn run_prediction(
actual_cursor: None,
error: None,
provider,
+ cumulative_logprob: None,
+ avg_logprob: None,
});
step_progress.set_substatus("requesting prediction");
@@ -455,6 +457,8 @@ async fn predict_anthropic(
_ => PredictionProvider::TeacherNonBatching(backend),
}
},
+ cumulative_logprob: None,
+ avg_logprob: None,
};
example.predictions.push(prediction);
@@ -572,6 +576,8 @@ async fn predict_openai(
_ => PredictionProvider::TeacherNonBatching(backend),
}
},
+ cumulative_logprob: None,
+ avg_logprob: None,
};
example.predictions.push(prediction);
@@ -656,6 +662,8 @@ pub async fn predict_baseten(
actual_cursor,
error: None,
provider: PredictionProvider::Baseten(format),
+ cumulative_logprob: None,
+ avg_logprob: None,
};
example.predictions.push(prediction);
@@ -426,6 +426,8 @@ pub async fn run_repair(
actual_cursor,
error: err,
provider: PredictionProvider::Repair,
+ cumulative_logprob: None,
+ avg_logprob: None,
});
Ok(())
@@ -78,6 +78,8 @@ pub async fn run_scoring(
has_isolated_whitespace_changes: false,
inserted_tokens: 0,
deleted_tokens: 0,
+ cumulative_logprob: None,
+ avg_logprob: None,
};
let cursor_path = example.spec.cursor_path.as_ref();
@@ -189,6 +191,8 @@ pub async fn run_scoring(
has_isolated_whitespace_changes,
inserted_tokens: token_changes.inserted_tokens,
deleted_tokens: token_changes.deleted_tokens,
+ cumulative_logprob: prediction.cumulative_logprob,
+ avg_logprob: prediction.avg_logprob,
});
}
@@ -379,7 +379,7 @@ impl Render for EditPredictionButton {
}
};
- if edit_prediction::should_show_upsell_modal() {
+ if edit_prediction::should_show_upsell_modal(cx) {
let tooltip_meta = if self.user_store.read(cx).current_user().is_some() {
"Choose a Plan"
} else {
@@ -699,8 +699,6 @@ actions!(
Rename,
/// Restarts the language server for the current file.
RestartLanguageServer,
- /// Reveals the current file in the system file manager.
- RevealInFileManager,
/// Reverses the order of selected lines.
ReverseLines,
/// Reloads the file from disk.
@@ -883,6 +881,8 @@ actions!(
UnwrapSyntaxNode,
/// Wraps selections in tag specified by language.
WrapSelectionsInTag,
+ /// Aligns selections from different rows into the same column
+ AlignSelections,
]
);
@@ -392,6 +392,20 @@ where
&bracket_colors_markup(&mut cx),
"All markdown brackets should be colored based on their depth, again"
);
+
+ cx.set_state(indoc! {r#"ˇ('')('')
+
+((''))('')
+
+('')((''))"#});
+ cx.executor().advance_clock(Duration::from_millis(100));
+ cx.executor().run_until_parked();
+
+ assert_eq!(
+ "«1('')1»«1('')1»\n\n«1(«2('')2»)1»«1('')1»\n\n«1('')1»«1(«2('')2»)1»\n1 hsla(207.80, 16.20%, 69.19%, 1.00)\n2 hsla(29.00, 54.00%, 65.88%, 1.00)\n",
+ &bracket_colors_markup(&mut cx),
+ "Markdown quote pairs should not interfere with parenthesis pairing"
+ );
}
#[gpui::test]
@@ -2320,6 +2320,19 @@ impl DisplaySnapshot {
if !line_indent.is_line_blank()
&& line_indent.raw_len() <= start_line_indent.raw_len()
{
+ if self
+ .buffer_snapshot()
+ .language_scope_at(Point::new(row, 0))
+ .is_some_and(|scope| {
+ matches!(
+ scope.override_name(),
+ Some("string") | Some("comment") | Some("comment.inclusive")
+ )
+ })
+ {
+ continue;
+ }
+
let prev_row = row - 1;
end = Some(Point::new(
prev_row,
@@ -1,11 +1,17 @@
use edit_prediction_types::{
EditPredictionDelegate, EditPredictionIconSet, PredictedCursorPosition,
};
-use gpui::{Entity, KeyBinding, Modifiers, prelude::*};
+use gpui::{
+ Entity, KeyBinding, KeybindingKeystroke, Keystroke, Modifiers, NoAction, Task, prelude::*,
+};
use indoc::indoc;
-use multi_buffer::{Anchor, MultiBufferSnapshot, ToPoint};
+use language::EditPredictionsMode;
+use language::{Buffer, CodeLabel};
+use multi_buffer::{Anchor, ExcerptId, MultiBufferSnapshot, ToPoint};
+use project::{Completion, CompletionResponse, CompletionSource};
use std::{
ops::Range,
+ rc::Rc,
sync::{
Arc,
atomic::{self, AtomicUsize},
@@ -15,7 +21,10 @@ use text::{Point, ToOffset};
use ui::prelude::*;
use crate::{
- AcceptEditPrediction, EditPrediction, MenuEditPredictionsPolicy, editor_tests::init_test,
+ AcceptEditPrediction, CompletionContext, CompletionProvider, EditPrediction,
+ EditPredictionKeybindAction, EditPredictionKeybindSurface, MenuEditPredictionsPolicy,
+ ShowCompletions,
+ editor_tests::{init_test, update_test_language_settings},
test::editor_test_context::EditorTestContext,
};
use rpc::proto::PeerId;
@@ -478,6 +487,537 @@ async fn test_edit_prediction_preview_cleanup_on_toggle_off(cx: &mut gpui::TestA
});
}
+// Regression test: in Subtle mode, holding the preview modifier BEFORE a
+// prediction arrives should put the editor into preview state, and the
+// preview should remain active the moment the prediction shows up.
+#[gpui::test]
+async fn test_edit_prediction_preview_activates_when_prediction_arrives_with_modifier_held(
+    cx: &mut gpui::TestAppContext,
+) {
+    init_test(cx, |_| {});
+    load_default_keymap(cx);
+    // Subtle mode requires a modifier to be held before predictions are shown.
+    update_test_language_settings(cx, &|settings| {
+        settings.edit_predictions.get_or_insert_default().mode = Some(EditPredictionsMode::Subtle);
+    });
+
+    let mut cx = EditorTestContext::new(cx).await;
+    let provider = cx.new(|_| FakeEditPredictionDelegate::default());
+    assign_editor_completion_provider(provider.clone(), &mut cx);
+    cx.set_state("let x = ˇ;");
+
+    // Sanity check: no prediction and no preview before any input.
+    cx.editor(|editor, _, _| {
+        assert!(!editor.has_active_edit_prediction());
+        assert!(!editor.edit_prediction_preview_is_active());
+    });
+
+    // Ask the editor which modifiers the preview keybinding uses, rather than
+    // hard-coding them, so the test follows the loaded keymap.
+    let preview_modifiers = cx.update_editor(|editor, window, cx| {
+        *editor
+            .preview_edit_prediction_keystroke(window, cx)
+            .unwrap()
+            .modifiers()
+    });
+
+    cx.simulate_modifiers_change(preview_modifiers);
+    cx.run_until_parked();
+
+    // Holding the modifier activates preview even with no prediction yet.
+    cx.editor(|editor, _, _| {
+        assert!(!editor.has_active_edit_prediction());
+        assert!(editor.edit_prediction_preview_is_active());
+    });
+
+    // Now a prediction arrives while the modifier is (conceptually) still held.
+    propose_edits(&provider, vec![(8..8, "42")], &mut cx);
+    cx.update_editor(|editor, window, cx| {
+        editor.set_menu_edit_predictions_policy(MenuEditPredictionsPolicy::ByProvider);
+        editor.update_visible_edit_prediction(window, cx)
+    });
+
+    cx.editor(|editor, _, _| {
+        assert!(editor.has_active_edit_prediction());
+        assert!(
+            editor.edit_prediction_preview_is_active(),
+            "prediction preview should activate immediately when the prediction arrives while the preview modifier is still held",
+        );
+    });
+}
+
+// Test helper: installs Zed's default keymap asset into the test app so that
+// keybinding-resolution code paths (accept/preview keystroke lookup) see the
+// real default bindings instead of an empty keymap.
+fn load_default_keymap(cx: &mut gpui::TestAppContext) {
+    cx.update(|cx| {
+        cx.bind_keys(
+            settings::KeymapFile::load_asset_allow_partial_failure(
+                settings::DEFAULT_KEYMAP_PATH,
+                cx,
+            )
+            .expect("failed to load default keymap"),
+        );
+    });
+}
+
+// Table-driven test for which keybinding the inline edit-prediction surface
+// selects (accept / preview / displayed) across keymap, mode, and editor-state
+// combinations.
+#[gpui::test]
+async fn test_inline_edit_prediction_keybind_selection_cases(cx: &mut gpui::TestAppContext) {
+    // Editor states that change which key context is active when bindings resolve.
+    enum InlineKeybindState {
+        Normal,
+        ShowingCompletions,
+        InLeadingWhitespace,
+        ShowingCompletionsAndLeadingWhitespace,
+    }
+
+    // Expected keystroke, expressed relative to the default keymap's bindings
+    // (captured below) or as a literal keystroke string to parse.
+    enum ExpectedKeystroke {
+        DefaultAccept,
+        DefaultPreview,
+        Literal(&'static str),
+    }
+
+    struct InlineKeybindCase {
+        name: &'static str,
+        use_default_keymap: bool,
+        mode: EditPredictionsMode,
+        extra_bindings: Vec<KeyBinding>,
+        state: InlineKeybindState,
+        expected_accept_keystroke: ExpectedKeystroke,
+        expected_preview_keystroke: ExpectedKeystroke,
+        expected_displayed_keystroke: ExpectedKeystroke,
+    }
+
+    // First, capture the default accept/preview keystrokes from a pristine
+    // default-keymap editor so the cases can refer to them symbolically.
+    init_test(cx, |_| {});
+    load_default_keymap(cx);
+    let mut default_cx = EditorTestContext::new(cx).await;
+    let provider = default_cx.new(|_| FakeEditPredictionDelegate::default());
+    assign_editor_completion_provider(provider.clone(), &mut default_cx);
+    default_cx.set_state("let x = ˇ;");
+    propose_edits(&provider, vec![(8..8, "42")], &mut default_cx);
+    default_cx
+        .update_editor(|editor, window, cx| editor.update_visible_edit_prediction(window, cx));
+
+    let (default_accept_keystroke, default_preview_keystroke) =
+        default_cx.update_editor(|editor, window, cx| {
+            let keybind_display = editor.edit_prediction_keybind_display(
+                EditPredictionKeybindSurface::Inline,
+                window,
+                cx,
+            );
+            let accept_keystroke = keybind_display
+                .accept_keystroke
+                .as_ref()
+                .expect("default inline edit prediction should have an accept binding")
+                .clone();
+            let preview_keystroke = keybind_display
+                .preview_keystroke
+                .as_ref()
+                .expect("default inline edit prediction should have a preview binding")
+                .clone();
+            (accept_keystroke, preview_keystroke)
+        });
+
+    let cases = [
+        InlineKeybindCase {
+            name: "default setup prefers tab over alt-tab for accept",
+            use_default_keymap: true,
+            mode: EditPredictionsMode::Eager,
+            extra_bindings: Vec::new(),
+            state: InlineKeybindState::Normal,
+            expected_accept_keystroke: ExpectedKeystroke::DefaultAccept,
+            expected_preview_keystroke: ExpectedKeystroke::DefaultPreview,
+            expected_displayed_keystroke: ExpectedKeystroke::DefaultAccept,
+        },
+        InlineKeybindCase {
+            name: "subtle mode displays preview binding inline",
+            use_default_keymap: true,
+            mode: EditPredictionsMode::Subtle,
+            extra_bindings: Vec::new(),
+            state: InlineKeybindState::Normal,
+            expected_accept_keystroke: ExpectedKeystroke::DefaultPreview,
+            expected_preview_keystroke: ExpectedKeystroke::DefaultPreview,
+            expected_displayed_keystroke: ExpectedKeystroke::DefaultPreview,
+        },
+        InlineKeybindCase {
+            name: "removing default tab binding still displays tab",
+            use_default_keymap: true,
+            mode: EditPredictionsMode::Eager,
+            // NoAction disables the default tab binding in this context.
+            extra_bindings: vec![KeyBinding::new(
+                "tab",
+                NoAction,
+                Some("Editor && edit_prediction && edit_prediction_mode == eager"),
+            )],
+            state: InlineKeybindState::Normal,
+            expected_accept_keystroke: ExpectedKeystroke::DefaultPreview,
+            expected_preview_keystroke: ExpectedKeystroke::DefaultPreview,
+            expected_displayed_keystroke: ExpectedKeystroke::DefaultPreview,
+        },
+        InlineKeybindCase {
+            name: "custom-only rebound accept key uses replacement key",
+            use_default_keymap: true,
+            mode: EditPredictionsMode::Eager,
+            extra_bindings: vec![KeyBinding::new(
+                "ctrl-enter",
+                AcceptEditPrediction,
+                Some("Editor && edit_prediction"),
+            )],
+            state: InlineKeybindState::Normal,
+            expected_accept_keystroke: ExpectedKeystroke::Literal("ctrl-enter"),
+            expected_preview_keystroke: ExpectedKeystroke::Literal("ctrl-enter"),
+            expected_displayed_keystroke: ExpectedKeystroke::Literal("ctrl-enter"),
+        },
+        InlineKeybindCase {
+            name: "showing completions restores conflict-context binding",
+            use_default_keymap: true,
+            mode: EditPredictionsMode::Eager,
+            extra_bindings: vec![KeyBinding::new(
+                "ctrl-enter",
+                AcceptEditPrediction,
+                Some("Editor && edit_prediction && showing_completions"),
+            )],
+            state: InlineKeybindState::ShowingCompletions,
+            expected_accept_keystroke: ExpectedKeystroke::Literal("ctrl-enter"),
+            expected_preview_keystroke: ExpectedKeystroke::Literal("ctrl-enter"),
+            expected_displayed_keystroke: ExpectedKeystroke::Literal("ctrl-enter"),
+        },
+        InlineKeybindCase {
+            name: "leading whitespace restores conflict-context binding",
+            use_default_keymap: false,
+            mode: EditPredictionsMode::Eager,
+            extra_bindings: vec![KeyBinding::new(
+                "ctrl-enter",
+                AcceptEditPrediction,
+                Some("Editor && edit_prediction && in_leading_whitespace"),
+            )],
+            state: InlineKeybindState::InLeadingWhitespace,
+            expected_accept_keystroke: ExpectedKeystroke::Literal("ctrl-enter"),
+            expected_preview_keystroke: ExpectedKeystroke::Literal("ctrl-enter"),
+            expected_displayed_keystroke: ExpectedKeystroke::Literal("ctrl-enter"),
+        },
+        InlineKeybindCase {
+            name: "showing completions and leading whitespace restore combined conflict binding",
+            use_default_keymap: false,
+            mode: EditPredictionsMode::Eager,
+            extra_bindings: vec![KeyBinding::new(
+                "ctrl-enter",
+                AcceptEditPrediction,
+                Some("Editor && edit_prediction && showing_completions && in_leading_whitespace"),
+            )],
+            state: InlineKeybindState::ShowingCompletionsAndLeadingWhitespace,
+            expected_accept_keystroke: ExpectedKeystroke::Literal("ctrl-enter"),
+            expected_preview_keystroke: ExpectedKeystroke::Literal("ctrl-enter"),
+            expected_displayed_keystroke: ExpectedKeystroke::Literal("ctrl-enter"),
+        },
+    ];
+
+    for case in cases {
+        // Each case gets a fresh app so earlier keymap mutations don't leak.
+        init_test(cx, |_| {});
+        if case.use_default_keymap {
+            load_default_keymap(cx);
+        }
+        update_test_language_settings(cx, &|settings| {
+            settings.edit_predictions.get_or_insert_default().mode = Some(case.mode);
+        });
+
+        if !case.extra_bindings.is_empty() {
+            cx.update(|cx| cx.bind_keys(case.extra_bindings.clone()));
+        }
+
+        let mut cx = EditorTestContext::new(cx).await;
+        let provider = cx.new(|_| FakeEditPredictionDelegate::default());
+        assign_editor_completion_provider(provider.clone(), &mut cx);
+
+        // Position the cursor to produce the state the case requires.
+        match case.state {
+            InlineKeybindState::Normal | InlineKeybindState::ShowingCompletions => {
+                cx.set_state("let x = ˇ;");
+            }
+            InlineKeybindState::InLeadingWhitespace
+            | InlineKeybindState::ShowingCompletionsAndLeadingWhitespace => {
+                cx.set_state(indoc! {"
+                    fn main() {
+                        ˇ
+                    }
+                "});
+            }
+        }
+
+        propose_edits(&provider, vec![(8..8, "42")], &mut cx);
+        cx.update_editor(|editor, window, cx| editor.update_visible_edit_prediction(window, cx));
+
+        // Open a (fake) completion menu when the case requires it.
+        if matches!(
+            case.state,
+            InlineKeybindState::ShowingCompletions
+                | InlineKeybindState::ShowingCompletionsAndLeadingWhitespace
+        ) {
+            assign_editor_completion_menu_provider(&mut cx);
+            cx.update_editor(|editor, window, cx| {
+                editor.show_completions(&ShowCompletions, window, cx);
+            });
+            cx.run_until_parked();
+        }
+
+        cx.update_editor(|editor, window, cx| {
+            assert!(
+                editor.has_active_edit_prediction(),
+                "case '{}' should have an active edit prediction",
+                case.name
+            );
+
+            let keybind_display = editor.edit_prediction_keybind_display(
+                EditPredictionKeybindSurface::Inline,
+                window,
+                cx,
+            );
+            let accept_keystroke = keybind_display
+                .accept_keystroke
+                .as_ref()
+                .unwrap_or_else(|| panic!("case '{}' should have an accept binding", case.name));
+            let preview_keystroke = keybind_display
+                .preview_keystroke
+                .as_ref()
+                .unwrap_or_else(|| panic!("case '{}' should have a preview binding", case.name));
+            let displayed_keystroke = keybind_display
+                .displayed_keystroke
+                .as_ref()
+                .unwrap_or_else(|| panic!("case '{}' should have a displayed binding", case.name));
+
+            // Resolve each symbolic expectation to a concrete keystroke.
+            let expected_accept_keystroke = match case.expected_accept_keystroke {
+                ExpectedKeystroke::DefaultAccept => default_accept_keystroke.clone(),
+                ExpectedKeystroke::DefaultPreview => default_preview_keystroke.clone(),
+                ExpectedKeystroke::Literal(keystroke) => KeybindingKeystroke::from_keystroke(
+                    Keystroke::parse(keystroke).expect("expected test keystroke to parse"),
+                ),
+            };
+            let expected_preview_keystroke = match case.expected_preview_keystroke {
+                ExpectedKeystroke::DefaultAccept => default_accept_keystroke.clone(),
+                ExpectedKeystroke::DefaultPreview => default_preview_keystroke.clone(),
+                ExpectedKeystroke::Literal(keystroke) => KeybindingKeystroke::from_keystroke(
+                    Keystroke::parse(keystroke).expect("expected test keystroke to parse"),
+                ),
+            };
+            let expected_displayed_keystroke = match case.expected_displayed_keystroke {
+                ExpectedKeystroke::DefaultAccept => default_accept_keystroke.clone(),
+                ExpectedKeystroke::DefaultPreview => default_preview_keystroke.clone(),
+                ExpectedKeystroke::Literal(keystroke) => KeybindingKeystroke::from_keystroke(
+                    Keystroke::parse(keystroke).expect("expected test keystroke to parse"),
+                ),
+            };
+
+            assert_eq!(
+                accept_keystroke, &expected_accept_keystroke,
+                "case '{}' selected the wrong accept binding",
+                case.name
+            );
+            assert_eq!(
+                preview_keystroke, &expected_preview_keystroke,
+                "case '{}' selected the wrong preview binding",
+                case.name
+            );
+            assert_eq!(
+                displayed_keystroke, &expected_displayed_keystroke,
+                "case '{}' selected the wrong displayed binding",
+                case.name
+            );
+
+            if matches!(case.mode, EditPredictionsMode::Subtle) {
+                assert!(
+                    editor.edit_prediction_requires_modifier(),
+                    "case '{}' should require a modifier",
+                    case.name
+                );
+            }
+        });
+    }
+}
+
+// With the default keymap, pressing `tab` while a prediction is active should
+// accept the prediction (rather than inserting a tab character).
+#[gpui::test]
+async fn test_tab_accepts_edit_prediction_over_completion(cx: &mut gpui::TestAppContext) {
+    init_test(cx, |_| {});
+    load_default_keymap(cx);
+
+    let mut cx = EditorTestContext::new(cx).await;
+    let provider = cx.new(|_| FakeEditPredictionDelegate::default());
+    assign_editor_completion_provider(provider.clone(), &mut cx);
+    cx.set_state("let x = ˇ;");
+
+    propose_edits(&provider, vec![(8..8, "42")], &mut cx);
+    cx.update_editor(|editor, window, cx| editor.update_visible_edit_prediction(window, cx));
+
+    // Confirm the prediction the editor holds is the one we proposed.
+    assert_editor_active_edit_completion(&mut cx, |_, edits| {
+        assert_eq!(edits.len(), 1);
+        assert_eq!(edits[0].1.as_ref(), "42");
+    });
+
+    cx.simulate_keystroke("tab");
+    cx.run_until_parked();
+
+    // Tab applied the predicted edit and moved the cursor past it.
+    cx.assert_editor_state("let x = 42ˇ;");
+}
+
+// Table-driven test for the expanded cursor-popover surface: single-line
+// predictions should advertise the Accept action, multi-line predictions the
+// Preview action, and stale menu state should not force Preview.
+#[gpui::test]
+async fn test_cursor_popover_edit_prediction_keybind_cases(cx: &mut gpui::TestAppContext) {
+    enum CursorPopoverPredictionKind {
+        SingleLine,
+        MultiLine,
+        SingleLineWithPreview,
+        MultiLineWithPreview,
+        DeleteSingleNewline,
+        StaleSingleLineAfterMultiLine,
+    }
+
+    struct CursorPopoverCase {
+        name: &'static str,
+        prediction_kind: CursorPopoverPredictionKind,
+        expected_action: EditPredictionKeybindAction,
+    }
+
+    let cases = [
+        CursorPopoverCase {
+            name: "single line prediction uses accept action",
+            prediction_kind: CursorPopoverPredictionKind::SingleLine,
+            expected_action: EditPredictionKeybindAction::Accept,
+        },
+        CursorPopoverCase {
+            name: "multi line prediction uses preview action",
+            prediction_kind: CursorPopoverPredictionKind::MultiLine,
+            expected_action: EditPredictionKeybindAction::Preview,
+        },
+        CursorPopoverCase {
+            name: "single line prediction with preview still uses accept action",
+            prediction_kind: CursorPopoverPredictionKind::SingleLineWithPreview,
+            expected_action: EditPredictionKeybindAction::Accept,
+        },
+        CursorPopoverCase {
+            name: "multi line prediction with preview uses preview action",
+            prediction_kind: CursorPopoverPredictionKind::MultiLineWithPreview,
+            expected_action: EditPredictionKeybindAction::Preview,
+        },
+        CursorPopoverCase {
+            name: "single line newline deletion uses accept action",
+            prediction_kind: CursorPopoverPredictionKind::DeleteSingleNewline,
+            expected_action: EditPredictionKeybindAction::Accept,
+        },
+        CursorPopoverCase {
+            name: "stale multi line prediction does not force preview action",
+            prediction_kind: CursorPopoverPredictionKind::StaleSingleLineAfterMultiLine,
+            expected_action: EditPredictionKeybindAction::Accept,
+        },
+    ];
+
+    for case in cases {
+        // Fresh app per case; the default keymap supplies tab/modifier bindings.
+        init_test(cx, |_| {});
+        load_default_keymap(cx);
+
+        let mut cx = EditorTestContext::new(cx).await;
+        let provider = cx.new(|_| FakeEditPredictionDelegate::default());
+        assign_editor_completion_provider(provider.clone(), &mut cx);
+
+        match case.prediction_kind {
+            CursorPopoverPredictionKind::SingleLine => {
+                cx.set_state("let x = ˇ;");
+                propose_edits(&provider, vec![(8..8, "42")], &mut cx);
+                cx.update_editor(|editor, window, cx| {
+                    editor.update_visible_edit_prediction(window, cx)
+                });
+            }
+            CursorPopoverPredictionKind::MultiLine => {
+                cx.set_state("let x = ˇ;");
+                // Inserted text contains a newline, making the prediction multi-line.
+                propose_edits(&provider, vec![(8..8, "42\n43")], &mut cx);
+                cx.update_editor(|editor, window, cx| {
+                    editor.update_visible_edit_prediction(window, cx)
+                });
+            }
+            CursorPopoverPredictionKind::SingleLineWithPreview => {
+                cx.set_state("let x = ˇ;");
+                propose_edits_with_preview(&provider, vec![(8..8, "42")], &mut cx).await;
+                cx.update_editor(|editor, window, cx| {
+                    editor.update_visible_edit_prediction(window, cx)
+                });
+            }
+            CursorPopoverPredictionKind::MultiLineWithPreview => {
+                cx.set_state("let x = ˇ;");
+                propose_edits_with_preview(&provider, vec![(8..8, "42\n43")], &mut cx).await;
+                cx.update_editor(|editor, window, cx| {
+                    editor.update_visible_edit_prediction(window, cx)
+                });
+            }
+            CursorPopoverPredictionKind::DeleteSingleNewline => {
+                cx.set_state(indoc! {"
+                    fn main() {
+                        let value = 1;
+                        ˇprintln!(\"done\");
+                    }
+                "});
+                // Deletes exactly one newline; result still renders on one line.
+                propose_edits(
+                    &provider,
+                    vec![(Point::new(1, 18)..Point::new(2, 17), "")],
+                    &mut cx,
+                );
+                cx.update_editor(|editor, window, cx| {
+                    editor.update_visible_edit_prediction(window, cx)
+                });
+            }
+            CursorPopoverPredictionKind::StaleSingleLineAfterMultiLine => {
+                cx.set_state("let x = ˇ;");
+                propose_edits(&provider, vec![(8..8, "42\n43")], &mut cx);
+                cx.update_editor(|editor, window, cx| {
+                    editor.update_visible_edit_prediction(window, cx)
+                });
+                // Retire the multi-line prediction into stale menu state, then
+                // replace it with a fresh single-line prediction.
+                cx.update_editor(|editor, _window, cx| {
+                    assert!(editor.active_edit_prediction.is_some());
+                    assert!(editor.stale_edit_prediction_in_menu.is_none());
+                    editor.take_active_edit_prediction(cx);
+                    assert!(editor.active_edit_prediction.is_none());
+                    assert!(editor.stale_edit_prediction_in_menu.is_some());
+                });
+
+                propose_edits(&provider, vec![(8..8, "42")], &mut cx);
+                cx.update_editor(|editor, window, cx| {
+                    editor.update_visible_edit_prediction(window, cx)
+                });
+            }
+        }
+
+        cx.update_editor(|editor, window, cx| {
+            assert!(
+                editor.has_active_edit_prediction(),
+                "case '{}' should have an active edit prediction",
+                case.name
+            );
+
+            let keybind_display = editor.edit_prediction_keybind_display(
+                EditPredictionKeybindSurface::CursorPopoverExpanded,
+                window,
+                cx,
+            );
+            let accept_keystroke = keybind_display
+                .accept_keystroke
+                .as_ref()
+                .unwrap_or_else(|| panic!("case '{}' should have an accept binding", case.name));
+            let preview_keystroke = keybind_display
+                .preview_keystroke
+                .as_ref()
+                .unwrap_or_else(|| panic!("case '{}' should have a preview binding", case.name));
+
+            assert_eq!(
+                keybind_display.action, case.expected_action,
+                "case '{}' selected the wrong cursor popover action",
+                case.name
+            );
+            assert_eq!(
+                accept_keystroke.key(),
+                "tab",
+                "case '{}' selected the wrong accept binding",
+                case.name
+            );
+            assert!(
+                preview_keystroke.modifiers().modified(),
+                "case '{}' should use a modified preview binding",
+                case.name
+            );
+
+            if matches!(
+                case.prediction_kind,
+                CursorPopoverPredictionKind::StaleSingleLineAfterMultiLine
+            ) {
+                assert!(
+                    editor.stale_edit_prediction_in_menu.is_none(),
+                    "case '{}' should clear stale menu state",
+                    case.name
+                );
+            }
+        });
+    }
+}
+
fn assert_editor_active_edit_completion(
cx: &mut EditorTestContext,
assert: impl FnOnce(MultiBufferSnapshot, &Vec<(Range<Anchor>, Arc<str>)>),
@@ -528,6 +1068,44 @@ fn propose_edits<T: ToOffset>(
propose_edits_with_cursor_position(provider, edits, None, cx);
}
+// Test helper: like `propose_edits`, but also computes an `edit_preview` from
+// the buffer so the fake provider's prediction carries preview data.
+async fn propose_edits_with_preview<T: ToOffset + Clone>(
+    provider: &Entity<FakeEditPredictionDelegate>,
+    edits: Vec<(Range<T>, &str)>,
+    cx: &mut EditorTestContext,
+) {
+    let snapshot = cx.buffer_snapshot();
+    // Convert the caller's offset ranges to anchor ranges so the edits remain
+    // positioned correctly if the buffer changes before they are applied.
+    let edits = edits
+        .into_iter()
+        .map(|(range, text)| {
+            let anchor_range =
+                snapshot.anchor_after(range.start.clone())..snapshot.anchor_before(range.end);
+            (anchor_range, Arc::<str>::from(text))
+        })
+        .collect::<Vec<_>>();
+
+    // A second copy of the edits, shared via Arc, feeds the preview computation.
+    let preview_edits = edits
+        .iter()
+        .map(|(range, text)| (range.clone(), text.clone()))
+        .collect::<Arc<[_]>>();
+
+    let edit_preview = cx
+        .buffer(|buffer: &Buffer, app| buffer.preview_edits(preview_edits, app))
+        .await;
+
+    let provider_edits = edits.into_iter().collect();
+
+    // Hand the prediction (with its preview) to the fake provider.
+    cx.update(|_, cx| {
+        provider.update(cx, |provider, _| {
+            provider.set_edit_prediction(Some(edit_prediction_types::EditPrediction::Local {
+                id: None,
+                edits: provider_edits,
+                cursor_position: None,
+                edit_preview: Some(edit_preview),
+            }))
+        })
+    });
+}
+
fn propose_edits_with_cursor_position<T: ToOffset>(
provider: &Entity<FakeEditPredictionDelegate>,
edits: Vec<(Range<T>, &str)>,
@@ -594,6 +1172,12 @@ fn assign_editor_completion_provider(
})
}
+// Test helper: installs the fake completion-menu provider so tests can open a
+// completion menu alongside an active edit prediction.
+fn assign_editor_completion_menu_provider(cx: &mut EditorTestContext) {
+    cx.update_editor(|editor, _, _| {
+        editor.set_completion_provider(Some(Rc::new(FakeCompletionMenuProvider)));
+    });
+}
+
fn propose_edits_non_zed<T: ToOffset>(
provider: &Entity<FakeNonZedEditPredictionDelegate>,
edits: Vec<(Range<T>, &str)>,
@@ -626,6 +1210,54 @@ fn assign_editor_completion_provider_non_zed(
})
}
+// Minimal completion provider used only to get a completion menu on screen in
+// tests; it always returns a single canned completion and never triggers or
+// filters on its own.
+struct FakeCompletionMenuProvider;
+
+impl CompletionProvider for FakeCompletionMenuProvider {
+    // Returns one fixed completion ("fake_completion") regardless of position.
+    fn completions(
+        &self,
+        _excerpt_id: ExcerptId,
+        _buffer: &Entity<Buffer>,
+        _buffer_position: text::Anchor,
+        _trigger: CompletionContext,
+        _window: &mut Window,
+        _cx: &mut Context<crate::Editor>,
+    ) -> Task<anyhow::Result<Vec<CompletionResponse>>> {
+        let completion = Completion {
+            replace_range: text::Anchor::MIN..text::Anchor::MAX,
+            new_text: "fake_completion".to_string(),
+            label: CodeLabel::plain("fake_completion".to_string(), None),
+            documentation: None,
+            source: CompletionSource::Custom,
+            icon_path: None,
+            match_start: None,
+            snippet_deduplication_key: None,
+            insert_text_mode: None,
+            confirm: None,
+        };
+
+        // `is_incomplete: false` tells the editor no re-query is needed.
+        Task::ready(Ok(vec![CompletionResponse {
+            completions: vec![completion],
+            display_options: Default::default(),
+            is_incomplete: false,
+        }]))
+    }
+
+    // Never auto-trigger; tests open the menu explicitly via ShowCompletions.
+    fn is_completion_trigger(
+        &self,
+        _buffer: &Entity<Buffer>,
+        _position: language::Anchor,
+        _text: &str,
+        _trigger_in_words: bool,
+        _cx: &mut Context<crate::Editor>,
+    ) -> bool {
+        false
+    }
+
+    // Skip fuzzy filtering so the canned completion is always shown as-is.
+    fn filter_completions(&self) -> bool {
+        false
+    }
+}
+
#[derive(Default, Clone)]
pub struct FakeEditPredictionDelegate {
pub completion: Option<edit_prediction_types::EditPrediction>,
@@ -105,7 +105,7 @@ use edit_prediction_types::{
EditPredictionGranularity, SuggestionDisplayType,
};
use editor_settings::{GoToDefinitionFallback, Minimap as MinimapSettings};
-use element::{AcceptEditPredictionBinding, LineWithInvisibles, PositionMap, layout_line};
+use element::{LineWithInvisibles, PositionMap, layout_line};
use futures::{
FutureExt,
future::{self, Shared, join},
@@ -153,7 +153,7 @@ use multi_buffer::{
ExcerptInfo, ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint, MultiBufferRow,
};
use parking_lot::Mutex;
-use persistence::DB;
+use persistence::EditorDb;
use project::{
BreakpointWithPosition, CodeAction, Completion, CompletionDisplayOptions, CompletionIntent,
CompletionResponse, CompletionSource, DisableAiSettings, DocumentHighlight, InlayHint, InlayId,
@@ -221,6 +221,7 @@ use workspace::{
notifications::{DetachAndPromptErr, NotificationId, NotifyTaskExt},
searchable::SearchEvent,
};
+pub use zed_actions::editor::RevealInFileManager;
use zed_actions::editor::{MoveDown, MoveUp};
use crate::{
@@ -256,7 +257,6 @@ pub(crate) const SCROLL_CENTER_TOP_BOTTOM_DEBOUNCE_TIMEOUT: Duration = Duration:
pub const LSP_REQUEST_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(50);
pub(crate) const EDIT_PREDICTION_KEY_CONTEXT: &str = "edit_prediction";
-pub(crate) const EDIT_PREDICTION_CONFLICT_KEY_CONTEXT: &str = "edit_prediction_conflict";
pub(crate) const MINIMAP_FONT_SIZE: AbsoluteLength = AbsoluteLength::Pixels(px(2.));
pub type RenderDiffHunkControlsFn = Arc<
@@ -701,6 +701,30 @@ pub enum EditPredictionPreview {
},
}
+/// Which UI surface is asking how to render the edit-prediction keybinding.
+#[derive(Copy, Clone, Eq, PartialEq)]
+enum EditPredictionKeybindSurface {
+    Inline,
+    CursorPopoverCompact,
+    CursorPopoverExpanded,
+}
+
+/// The action a surface should advertise for the prediction: accept it
+/// directly, or preview it first.
+#[derive(Copy, Clone, Eq, PartialEq, Debug)]
+enum EditPredictionKeybindAction {
+    Accept,
+    Preview,
+}
+
+/// Resolved keybinding info for rendering an edit-prediction hint.
+/// The raw accept/preview keystrokes are retained only in test builds so
+/// tests can assert on binding selection.
+struct EditPredictionKeybindDisplay {
+    #[cfg(test)]
+    accept_keystroke: Option<gpui::KeybindingKeystroke>,
+    #[cfg(test)]
+    preview_keystroke: Option<gpui::KeybindingKeystroke>,
+    // The keystroke actually shown in the UI for this surface.
+    displayed_keystroke: Option<gpui::KeybindingKeystroke>,
+    action: EditPredictionKeybindAction,
+    missing_accept_keystroke: bool,
+    // Whether to show a "hold" hint; set when the preview was released too fast.
+    show_hold_label: bool,
+}
+
impl EditPredictionPreview {
pub fn released_too_fast(&self) -> bool {
match self {
@@ -1225,8 +1249,7 @@ pub struct Editor {
show_completions_on_input_override: Option<bool>,
menu_edit_predictions_policy: MenuEditPredictionsPolicy,
edit_prediction_preview: EditPredictionPreview,
- edit_prediction_indent_conflict: bool,
- edit_prediction_requires_modifier_in_indent_conflict: bool,
+ in_leading_whitespace: bool,
next_inlay_id: usize,
next_color_inlay_id: usize,
_subscriptions: Vec<Subscription>,
@@ -2473,8 +2496,7 @@ impl Editor {
show_completions_on_input_override: None,
menu_edit_predictions_policy: MenuEditPredictionsPolicy::ByProvider,
edit_prediction_settings: EditPredictionSettings::Disabled,
- edit_prediction_indent_conflict: false,
- edit_prediction_requires_modifier_in_indent_conflict: true,
+ in_leading_whitespace: false,
custom_context_menu: None,
show_git_blame_gutter: false,
show_git_blame_inline: false,
@@ -2856,12 +2878,17 @@ impl Editor {
}
if has_active_edit_prediction {
- if self.edit_prediction_in_conflict() {
- key_context.add(EDIT_PREDICTION_CONFLICT_KEY_CONTEXT);
- } else {
- key_context.add(EDIT_PREDICTION_KEY_CONTEXT);
- key_context.add("copilot_suggestion");
- }
+ key_context.add(EDIT_PREDICTION_KEY_CONTEXT);
+ key_context.add("copilot_suggestion");
+ }
+
+ if self.in_leading_whitespace {
+ key_context.add("in_leading_whitespace");
+ }
+ if self.edit_prediction_requires_modifier() {
+ key_context.set("edit_prediction_mode", "subtle")
+ } else {
+ key_context.set("edit_prediction_mode", "eager");
}
if self.selection_mark_mode {
@@ -2869,14 +2896,23 @@ impl Editor {
}
let disjoint = self.selections.disjoint_anchors();
- let snapshot = self.snapshot(window, cx);
- let snapshot = snapshot.buffer_snapshot();
- if self.mode == EditorMode::SingleLine
- && let [selection] = disjoint
+ if matches!(
+ &self.mode,
+ EditorMode::SingleLine | EditorMode::AutoHeight { .. }
+ ) && let [selection] = disjoint
&& selection.start == selection.end
- && selection.end.to_offset(snapshot) == snapshot.len()
{
- key_context.add("end_of_input");
+ let snapshot = self.snapshot(window, cx);
+ let snapshot = snapshot.buffer_snapshot();
+ let caret_offset = selection.end.to_offset(snapshot);
+
+ if caret_offset == MultiBufferOffset(0) {
+ key_context.add("start_of_input");
+ }
+
+ if caret_offset == snapshot.len() {
+ key_context.add("end_of_input");
+ }
}
if self.has_any_expanded_diff_hunks(cx) {
@@ -2915,32 +2951,13 @@ impl Editor {
}
}
- pub fn edit_prediction_in_conflict(&self) -> bool {
- if !self.show_edit_predictions_in_menu() {
- return false;
- }
-
- let showing_completions = self
- .context_menu
- .borrow()
- .as_ref()
- .is_some_and(|context| matches!(context, CodeContextMenu::Completions(_)));
-
- showing_completions
- || self.edit_prediction_requires_modifier()
- // Require modifier key when the cursor is on leading whitespace, to allow `tab`
- // bindings to insert tab characters.
- || (self.edit_prediction_requires_modifier_in_indent_conflict && self.edit_prediction_indent_conflict)
- }
-
- pub fn accept_edit_prediction_keybind(
+ fn accept_edit_prediction_keystroke(
&self,
granularity: EditPredictionGranularity,
window: &mut Window,
cx: &mut App,
- ) -> AcceptEditPredictionBinding {
+ ) -> Option<gpui::KeybindingKeystroke> {
let key_context = self.key_context_internal(true, window, cx);
- let in_conflict = self.edit_prediction_in_conflict();
let bindings =
match granularity {
@@ -2953,13 +2970,157 @@ impl Editor {
}
};
- AcceptEditPredictionBinding(bindings.into_iter().rev().find(|binding| {
- !in_conflict
- || binding
- .keystrokes()
- .first()
- .is_some_and(|keystroke| keystroke.modifiers().modified())
- }))
+ bindings
+ .into_iter()
+ .rev()
+ .find_map(|binding| match binding.keystrokes() {
+ [keystroke, ..] => Some(keystroke.clone()),
+ _ => None,
+ })
+ }
+
+    /// Returns the first keystroke of the highest-precedence
+    /// `AcceptEditPrediction` binding whose first keystroke carries modifiers —
+    /// i.e. the binding suitable for hold-to-preview. `None` when no modified
+    /// binding exists in the current key context.
+    fn preview_edit_prediction_keystroke(
+        &self,
+        window: &mut Window,
+        cx: &mut App,
+    ) -> Option<gpui::KeybindingKeystroke> {
+        let key_context = self.key_context_internal(true, window, cx);
+        let bindings = window.bindings_for_action_in_context(&AcceptEditPrediction, key_context);
+        // Bindings are iterated in reverse so later (higher-precedence)
+        // bindings win; only modifier-bearing keystrokes qualify for preview.
+        bindings
+            .into_iter()
+            .rev()
+            .find_map(|binding| match binding.keystrokes() {
+                [keystroke, ..] if keystroke.modifiers().modified() => Some(keystroke.clone()),
+                _ => None,
+            })
+    }
+
+    /// Whether the currently held `modifiers` exactly match the modifiers of
+    /// any accept binding's first keystroke — for full, next-word, or
+    /// next-line acceptance — which means the user is holding a
+    /// preview-triggering chord.
+    fn edit_prediction_preview_modifiers_held(
+        &self,
+        modifiers: &Modifiers,
+        window: &mut Window,
+        cx: &mut App,
+    ) -> bool {
+        let key_context = self.key_context_internal(true, window, cx);
+        // All three accept granularities count: holding the chord for any of
+        // them should show the preview.
+        let actions: [&dyn Action; 3] = [
+            &AcceptEditPrediction,
+            &AcceptNextWordEditPrediction,
+            &AcceptNextLineEditPrediction,
+        ];
+
+        actions.into_iter().any(|action| {
+            window
+                .bindings_for_action_in_context(action, key_context.clone())
+                .into_iter()
+                .rev()
+                .any(|binding| {
+                    binding.keystrokes().first().is_some_and(|keystroke| {
+                        // Require a modified chord AND an exact modifier match,
+                        // so plain `tab` never activates preview.
+                        keystroke.modifiers().modified() && keystroke.modifiers() == modifiers
+                    })
+                })
+        })
+    }
+
+    /// Whether the expanded cursor popover should advertise Preview instead of
+    /// Accept: true when the prediction's post-edit result spans more than one
+    /// row. Move predictions never prefer preview.
+    fn edit_prediction_cursor_popover_prefers_preview(
+        &self,
+        completion: &EditPredictionState,
+    ) -> bool {
+        match &completion.completion {
+            EditPrediction::Edit {
+                edits, snapshot, ..
+            } => {
+                // Track the overall row span covered by the edits' results.
+                let mut start_row: Option<u32> = None;
+                let mut end_row: Option<u32> = None;
+
+                for (range, text) in edits {
+                    let edit_start_row = range.start.text_anchor.to_point(snapshot).row;
+                    let old_end_row = range.end.text_anchor.to_point(snapshot).row;
+                    // Rows added by the inserted text.
+                    let inserted_newline_count = text
+                        .as_ref()
+                        .chars()
+                        .filter(|character| *character == '\n')
+                        .count() as u32;
+                    let deleted_newline_count = old_end_row - edit_start_row;
+                    let preview_end_row = edit_start_row + inserted_newline_count;
+
+                    start_row =
+                        Some(start_row.map_or(edit_start_row, |row| row.min(edit_start_row)));
+                    end_row = Some(end_row.map_or(preview_end_row, |row| row.max(preview_end_row)));
+
+                    // Deleting a single newline still yields a one-line result,
+                    // so only multi-newline deletions extend the span.
+                    if deleted_newline_count > 1 {
+                        end_row = Some(end_row.map_or(old_end_row, |row| row.max(old_end_row)));
+                    }
+                }
+
+                // Multi-row span => prefer previewing before accepting.
+                start_row
+                    .zip(end_row)
+                    .is_some_and(|(start_row, end_row)| end_row > start_row)
+            }
+            EditPrediction::MoveWithin { .. } | EditPrediction::MoveOutside { .. } => false,
+        }
+    }
+
+    /// Resolves which action (Accept vs Preview) and which keystroke a given
+    /// surface should display for the active edit prediction.
+    fn edit_prediction_keybind_display(
+        &self,
+        surface: EditPredictionKeybindSurface,
+        window: &mut Window,
+        cx: &mut App,
+    ) -> EditPredictionKeybindDisplay {
+        let accept_keystroke =
+            self.accept_edit_prediction_keystroke(EditPredictionGranularity::Full, window, cx);
+        let preview_keystroke = self.preview_edit_prediction_keystroke(window, cx);
+
+        let action = match surface {
+            // Inline/compact surfaces: Preview only when the mode requires a
+            // modifier (subtle mode), otherwise Accept.
+            EditPredictionKeybindSurface::Inline
+            | EditPredictionKeybindSurface::CursorPopoverCompact => {
+                if self.edit_prediction_requires_modifier() {
+                    EditPredictionKeybindAction::Preview
+                } else {
+                    EditPredictionKeybindAction::Accept
+                }
+            }
+            // Expanded popover: Preview when the active prediction's result
+            // spans multiple rows, otherwise Accept.
+            EditPredictionKeybindSurface::CursorPopoverExpanded => self
+                .active_edit_prediction
+                .as_ref()
+                .filter(|completion| {
+                    self.edit_prediction_cursor_popover_prefers_preview(completion)
+                })
+                .map_or(EditPredictionKeybindAction::Accept, |_| {
+                    EditPredictionKeybindAction::Preview
+                }),
+        };
+        // Keep copies for test assertions before the originals are moved below.
+        #[cfg(test)]
+        let preview_copy = preview_keystroke.clone();
+        #[cfg(test)]
+        let accept_copy = accept_keystroke.clone();
+
+        let displayed_keystroke = match surface {
+            EditPredictionKeybindSurface::Inline => match action {
+                EditPredictionKeybindAction::Accept => accept_keystroke,
+                EditPredictionKeybindAction::Preview => preview_keystroke,
+            },
+            // Popover surfaces fall back to the accept keystroke when no
+            // modified preview binding exists.
+            EditPredictionKeybindSurface::CursorPopoverCompact
+            | EditPredictionKeybindSurface::CursorPopoverExpanded => match action {
+                EditPredictionKeybindAction::Accept => accept_keystroke,
+                EditPredictionKeybindAction::Preview => {
+                    preview_keystroke.or_else(|| accept_keystroke.clone())
+                }
+            },
+        };
+
+        let missing_accept_keystroke = displayed_keystroke.is_none();
+
+        EditPredictionKeybindDisplay {
+            #[cfg(test)]
+            accept_keystroke: accept_copy,
+            #[cfg(test)]
+            preview_keystroke: preview_copy,
+            displayed_keystroke,
+            action,
+            missing_accept_keystroke,
+            // The compact popover shows a "hold" hint if the user released the
+            // preview chord too quickly last time.
+            show_hold_label: matches!(surface, EditPredictionKeybindSurface::CursorPopoverCompact)
+                && self.edit_prediction_preview.released_too_fast(),
+        }
     }
pub fn new_file(
@@ -3596,7 +3757,6 @@ impl Editor {
self.refresh_matching_bracket_highlights(&display_map, cx);
self.refresh_outline_symbols_at_cursor(cx);
self.update_visible_edit_prediction(window, cx);
- self.edit_prediction_requires_modifier_in_indent_conflict = true;
self.inline_blame_popover.take();
if self.git_blame_inline_enabled {
self.start_inline_blame_timer(window, cx);
@@ -3638,6 +3798,7 @@ impl Editor {
let selections = selections.clone();
let background_executor = cx.background_executor().clone();
let editor_id = cx.entity().entity_id().as_u64() as ItemId;
+ let db = EditorDb::global(cx);
self.serialize_selections = cx.background_spawn(async move {
background_executor.timer(SERIALIZATION_THROTTLE_TIME).await;
let db_selections = selections
@@ -3650,7 +3811,7 @@ impl Editor {
})
.collect();
- DB.save_editor_selections(editor_id, workspace_id, db_selections)
+ db.save_editor_selections(editor_id, workspace_id, db_selections)
.await
.with_context(|| {
format!(
@@ -3735,16 +3896,17 @@ impl Editor {
(start, end, start_fp, end_fp)
})
.collect::<Vec<_>>();
+ let db = EditorDb::global(cx);
self.serialize_folds = cx.background_spawn(async move {
background_executor.timer(SERIALIZATION_THROTTLE_TIME).await;
if db_folds.is_empty() {
// No folds - delete any persisted folds for this file
- DB.delete_file_folds(workspace_id, file_path)
+ db.delete_file_folds(workspace_id, file_path)
.await
.with_context(|| format!("deleting file folds for workspace {workspace_id:?}"))
.log_err();
} else {
- DB.save_file_folds(workspace_id, file_path, db_folds)
+ db.save_file_folds(workspace_id, file_path, db_folds)
.await
.with_context(|| {
format!("persisting file folds for workspace {workspace_id:?}")
@@ -8216,8 +8378,6 @@ impl Editor {
}
}
}
-
- self.edit_prediction_requires_modifier_in_indent_conflict = false;
}
pub fn accept_next_word_edit_prediction(
@@ -8369,9 +8529,12 @@ impl Editor {
window: &mut Window,
cx: &mut Context<Self>,
) {
+ self.update_edit_prediction_settings(cx);
+
// Ensure that the edit prediction preview is updated, even when not
// enabled, if there's an active edit prediction preview.
if self.show_edit_predictions_in_menu()
+ || self.edit_prediction_requires_modifier()
|| matches!(
self.edit_prediction_preview,
EditPredictionPreview::Active { .. }
@@ -8464,25 +8627,7 @@ impl Editor {
window: &mut Window,
cx: &mut Context<Self>,
) {
- let mut modifiers_held = false;
-
- // Check bindings for all granularities.
- // If the user holds the key for Word, Line, or Full, we want to show the preview.
- let granularities = [
- EditPredictionGranularity::Full,
- EditPredictionGranularity::Line,
- EditPredictionGranularity::Word,
- ];
-
- for granularity in granularities {
- if let Some(keystroke) = self
- .accept_edit_prediction_keybind(granularity, window, cx)
- .keystroke()
- {
- modifiers_held = modifiers_held
- || (keystroke.modifiers() == modifiers && keystroke.modifiers().modified());
- }
- }
+ let modifiers_held = self.edit_prediction_preview_modifiers_held(modifiers, window, cx);
if modifiers_held {
if matches!(
@@ -8580,9 +8725,9 @@ impl Editor {
self.edit_prediction_settings =
self.edit_prediction_settings_at_position(&buffer, cursor_buffer_position, cx);
- self.edit_prediction_indent_conflict = multibuffer.is_line_whitespace_upto(cursor);
+ self.in_leading_whitespace = multibuffer.is_line_whitespace_upto(cursor);
- if self.edit_prediction_indent_conflict {
+ if self.in_leading_whitespace {
let cursor_point = cursor.to_point(&multibuffer);
let mut suggested_indent = None;
multibuffer.suggested_indents_callback(
@@ -8597,7 +8742,7 @@ impl Editor {
if let Some(indent) = suggested_indent
&& indent.len == cursor_point.column
{
- self.edit_prediction_indent_conflict = false;
+ self.in_leading_whitespace = false;
}
}
@@ -9610,7 +9755,7 @@ impl Editor {
const BORDER_WIDTH: Pixels = px(1.);
- let keybind = self.render_edit_prediction_accept_keybind(window, cx);
+ let keybind = self.render_edit_prediction_keybind(window, cx);
let has_keybind = keybind.is_some();
let mut element = h_flex()
@@ -9766,49 +9911,81 @@ impl Editor {
}
}
- fn render_edit_prediction_accept_keybind(
+ fn render_edit_prediction_inline_keystroke(
&self,
- window: &mut Window,
- cx: &mut App,
- ) -> Option<AnyElement> {
- let accept_binding =
- self.accept_edit_prediction_keybind(EditPredictionGranularity::Full, window, cx);
- let accept_keystroke = accept_binding.keystroke()?;
-
+ keystroke: &gpui::KeybindingKeystroke,
+ modifiers_color: Color,
+ cx: &App,
+ ) -> AnyElement {
let is_platform_style_mac = PlatformStyle::platform() == PlatformStyle::Mac;
- let modifiers_color = if *accept_keystroke.modifiers() == window.modifiers() {
- Color::Accent
- } else {
- Color::Muted
- };
-
h_flex()
.px_0p5()
.when(is_platform_style_mac, |parent| parent.gap_0p5())
.font(theme::ThemeSettings::get_global(cx).buffer_font.clone())
.text_size(TextSize::XSmall.rems(cx))
.child(h_flex().children(ui::render_modifiers(
- accept_keystroke.modifiers(),
+ keystroke.modifiers(),
PlatformStyle::platform(),
Some(modifiers_color),
Some(IconSize::XSmall.rems().into()),
true,
)))
.when(is_platform_style_mac, |parent| {
- parent.child(accept_keystroke.key().to_string())
+ parent.child(keystroke.key().to_string())
})
.when(!is_platform_style_mac, |parent| {
parent.child(
- Key::new(
- util::capitalize(accept_keystroke.key()),
- Some(Color::Default),
- )
- .size(Some(IconSize::XSmall.rems().into())),
+ Key::new(util::capitalize(keystroke.key()), Some(Color::Default))
+ .size(Some(IconSize::XSmall.rems().into())),
)
})
.into_any()
- .into()
+ }
+
+ fn render_edit_prediction_popover_keystroke(
+ &self,
+ keystroke: &gpui::KeybindingKeystroke,
+ color: Color,
+ cx: &App,
+ ) -> AnyElement {
+ let is_platform_style_mac = PlatformStyle::platform() == PlatformStyle::Mac;
+
+ if keystroke.modifiers().modified() {
+ h_flex()
+ .font(theme::ThemeSettings::get_global(cx).buffer_font.clone())
+ .when(is_platform_style_mac, |parent| parent.gap_1())
+ .child(h_flex().children(ui::render_modifiers(
+ keystroke.modifiers(),
+ PlatformStyle::platform(),
+ Some(color),
+ None,
+ false,
+ )))
+ .into_any()
+ } else {
+ Key::new(util::capitalize(keystroke.key()), Some(color))
+ .size(Some(IconSize::XSmall.rems().into()))
+ .into_any_element()
+ }
+ }
+
+ fn render_edit_prediction_keybind(
+ &self,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> Option<AnyElement> {
+ let keybind_display =
+ self.edit_prediction_keybind_display(EditPredictionKeybindSurface::Inline, window, cx);
+ let keystroke = keybind_display.displayed_keystroke.as_ref()?;
+
+ let modifiers_color = if *keystroke.modifiers() == window.modifiers() {
+ Color::Accent
+ } else {
+ Color::Muted
+ };
+
+ Some(self.render_edit_prediction_inline_keystroke(keystroke, modifiers_color, cx))
}
fn render_edit_prediction_line_popover(
@@ -9820,7 +9997,7 @@ impl Editor {
) -> Stateful<Div> {
let padding_right = if icon.is_some() { px(4.) } else { px(8.) };
- let keybind = self.render_edit_prediction_accept_keybind(window, cx);
+ let keybind = self.render_edit_prediction_keybind(window, cx);
let has_keybind = keybind.is_some();
let icons = Self::get_prediction_provider_icons(&self.edit_prediction_provider, cx);
@@ -9879,7 +10056,7 @@ impl Editor {
window: &mut Window,
cx: &mut App,
) -> Stateful<Div> {
- let keybind = self.render_edit_prediction_accept_keybind(window, cx);
+ let keybind = self.render_edit_prediction_keybind(window, cx);
let has_keybind = keybind.is_some();
let icons = Self::get_prediction_provider_icons(&self.edit_prediction_provider, cx);
@@ -9962,8 +10139,7 @@ impl Editor {
max_width: Pixels,
cursor_point: Point,
style: &EditorStyle,
- accept_keystroke: Option<&gpui::KeybindingKeystroke>,
- _window: &Window,
+ window: &mut Window,
cx: &mut Context<Editor>,
) -> Option<AnyElement> {
let provider = self.edit_prediction_provider.as_ref()?;
@@ -9980,13 +10156,18 @@ impl Editor {
if !self.has_visible_completions_menu() {
const RADIUS: Pixels = px(6.);
const BORDER_WIDTH: Pixels = px(1.);
+ let keybind_display = self.edit_prediction_keybind_display(
+ EditPredictionKeybindSurface::CursorPopoverCompact,
+ window,
+ cx,
+ );
return Some(
h_flex()
.elevation_2(cx)
.border(BORDER_WIDTH)
.border_color(cx.theme().colors().border)
- .when(accept_keystroke.is_none(), |el| {
+ .when(keybind_display.missing_accept_keystroke, |el| {
el.border_color(cx.theme().status().error)
})
.rounded(RADIUS)
@@ -10017,18 +10198,19 @@ impl Editor {
.border_l_1()
.border_color(cx.theme().colors().border)
.bg(Self::edit_prediction_line_popover_bg_color(cx))
- .when(self.edit_prediction_preview.released_too_fast(), |el| {
+ .when(keybind_display.show_hold_label, |el| {
el.child(
Label::new("Hold")
.size(LabelSize::Small)
- .when(accept_keystroke.is_none(), |el| {
- el.strikethrough()
- })
+ .when(
+ keybind_display.missing_accept_keystroke,
+ |el| el.strikethrough(),
+ )
.line_height_style(LineHeightStyle::UiLabel),
)
})
.id("edit_prediction_cursor_popover_keybind")
- .when(accept_keystroke.is_none(), |el| {
+ .when(keybind_display.missing_accept_keystroke, |el| {
let status_colors = cx.theme().status();
el.bg(status_colors.error_background)
@@ -10041,15 +10223,13 @@ impl Editor {
})
})
.when_some(
- accept_keystroke.as_ref(),
- |el, accept_keystroke| {
- el.child(h_flex().children(ui::render_modifiers(
- accept_keystroke.modifiers(),
- PlatformStyle::platform(),
- Some(Color::Default),
- Some(IconSize::XSmall.rems().into()),
- false,
- )))
+ keybind_display.displayed_keystroke.as_ref(),
+ |el, compact_keystroke| {
+ el.child(self.render_edit_prediction_popover_keystroke(
+ compact_keystroke,
+ Color::Default,
+ cx,
+ ))
},
),
)
@@ -10096,8 +10276,12 @@ impl Editor {
};
let has_completion = self.active_edit_prediction.is_some();
+ let keybind_display = self.edit_prediction_keybind_display(
+ EditPredictionKeybindSurface::CursorPopoverExpanded,
+ window,
+ cx,
+ );
- let is_platform_style_mac = PlatformStyle::platform() == PlatformStyle::Mac;
Some(
h_flex()
.min_w(min_width)
@@ -10113,41 +10297,51 @@ impl Editor {
.overflow_hidden()
.child(completion),
)
- .when_some(accept_keystroke, |el, accept_keystroke| {
- if !accept_keystroke.modifiers().modified() {
- return el;
- }
+ .when_some(
+ keybind_display.displayed_keystroke.as_ref(),
+ |el, keystroke| {
+ let key_color = if !has_completion {
+ Color::Muted
+ } else {
+ Color::Default
+ };
- el.child(
- h_flex()
- .h_full()
- .border_l_1()
- .rounded_r_lg()
- .border_color(cx.theme().colors().border)
- .bg(Self::edit_prediction_line_popover_bg_color(cx))
- .gap_1()
- .py_1()
- .px_2()
- .child(
+ if keybind_display.action == EditPredictionKeybindAction::Preview {
+ el.child(
h_flex()
- .font(theme::ThemeSettings::get_global(cx).buffer_font.clone())
- .when(is_platform_style_mac, |parent| parent.gap_1())
- .child(h_flex().children(ui::render_modifiers(
- accept_keystroke.modifiers(),
- PlatformStyle::platform(),
- Some(if !has_completion {
- Color::Muted
- } else {
- Color::Default
- }),
- None,
- false,
- ))),
+ .h_full()
+ .border_l_1()
+ .rounded_r_lg()
+ .border_color(cx.theme().colors().border)
+ .bg(Self::edit_prediction_line_popover_bg_color(cx))
+ .gap_1()
+ .py_1()
+ .px_2()
+ .child(self.render_edit_prediction_popover_keystroke(
+ keystroke, key_color, cx,
+ ))
+ .child(Label::new("Preview").into_any_element())
+ .opacity(if has_completion { 1.0 } else { 0.4 }),
)
- .child(Label::new("Preview").into_any_element())
- .opacity(if has_completion { 1.0 } else { 0.4 }),
- )
- })
+ } else {
+ el.child(
+ h_flex()
+ .h_full()
+ .border_l_1()
+ .rounded_r_lg()
+ .border_color(cx.theme().colors().border)
+ .bg(Self::edit_prediction_line_popover_bg_color(cx))
+ .gap_1()
+ .py_1()
+ .px_2()
+ .child(self.render_edit_prediction_popover_keystroke(
+ keystroke, key_color, cx,
+ ))
+ .opacity(if has_completion { 1.0 } else { 0.4 }),
+ )
+ }
+ },
+ )
.into_any(),
)
}
@@ -11839,6 +12033,98 @@ impl Editor {
}
}
+ pub fn align_selections(
+ &mut self,
+ _: &crate::actions::AlignSelections,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx);
+
+ let display_snapshot = self.display_snapshot(cx);
+
+ struct CursorData {
+ anchor: Anchor,
+ point: Point,
+ }
+ let cursor_data: Vec<CursorData> = self
+ .selections
+ .disjoint_anchors()
+ .iter()
+ .map(|selection| {
+ let anchor = if selection.reversed {
+ selection.head()
+ } else {
+ selection.tail()
+ };
+ CursorData {
+ anchor: anchor,
+ point: anchor.to_point(&display_snapshot.buffer_snapshot()),
+ }
+ })
+ .collect();
+
+ let rows_anchors_count: Vec<usize> = cursor_data
+ .iter()
+ .map(|cursor| cursor.point.row)
+ .chunk_by(|&row| row)
+ .into_iter()
+ .map(|(_, group)| group.count())
+ .collect();
+ let max_columns = rows_anchors_count.iter().max().copied().unwrap_or(0);
+ let mut rows_column_offset = vec![0; rows_anchors_count.len()];
+ let mut edits = Vec::new();
+
+ for column_idx in 0..max_columns {
+ let mut cursor_index = 0;
+
+            // Compute target_column: the column that this group of selections will be aligned to
+ let mut target_column = 0;
+ for (row_idx, cursor_count) in rows_anchors_count.iter().enumerate() {
+ // Skip rows that don't have this column
+ if column_idx >= *cursor_count {
+ cursor_index += cursor_count;
+ continue;
+ }
+
+ let point = &cursor_data[cursor_index + column_idx].point;
+ let adjusted_column = point.column + rows_column_offset[row_idx];
+ if adjusted_column > target_column {
+ target_column = adjusted_column;
+ }
+ cursor_index += cursor_count;
+ }
+
+ // Collect edits for this column
+ cursor_index = 0;
+ for (row_idx, cursor_count) in rows_anchors_count.iter().enumerate() {
+ // Skip rows that don't have this column
+ if column_idx >= *cursor_count {
+ cursor_index += *cursor_count;
+ continue;
+ }
+
+ let point = &cursor_data[cursor_index + column_idx].point;
+ let spaces_needed = target_column - point.column - rows_column_offset[row_idx];
+ if spaces_needed > 0 {
+ let anchor = cursor_data[cursor_index + column_idx]
+ .anchor
+ .bias_left(&display_snapshot);
+ edits.push((anchor..anchor, " ".repeat(spaces_needed as usize)));
+ }
+ rows_column_offset[row_idx] += spaces_needed;
+
+ cursor_index += *cursor_count;
+ }
+ }
+
+ if !edits.is_empty() {
+ self.transact(window, cx, |editor, _window, cx| {
+ editor.edit(edits, cx);
+ });
+ }
+ }
+
pub fn disable_breakpoint(
&mut self,
_: &crate::actions::DisableBreakpoint,
@@ -13123,16 +13409,6 @@ impl Editor {
// Split selections to respect paragraph, indent, and comment prefix boundaries.
let wrap_ranges = selections.into_iter().flat_map(|selection| {
- let mut non_blank_rows_iter = (selection.start.row..=selection.end.row)
- .filter(|row| !buffer.is_line_blank(MultiBufferRow(*row)))
- .peekable();
-
- let first_row = if let Some(&row) = non_blank_rows_iter.peek() {
- row
- } else {
- return Vec::new();
- };
-
let language_settings = buffer.language_settings_at(selection.head(), cx);
let language_scope = buffer.language_scope_at(selection.head());
@@ -13209,8 +13485,70 @@ impl Editor {
(indent, comment_prefix, rewrap_prefix)
};
+ let mut start_row = selection.start.row;
+ let mut end_row = selection.end.row;
+
+ if selection.is_empty() {
+ let cursor_row = selection.start.row;
+
+ let (mut indent_size, comment_prefix, _) = indent_and_prefix_for_row(cursor_row);
+ let line_prefix = match &comment_prefix {
+ Some(CommentFormat::Line(prefix) | CommentFormat::BlockLine(prefix)) => {
+ Some(prefix.as_str())
+ }
+ Some(CommentFormat::BlockCommentWithEnd(BlockCommentConfig {
+ prefix, ..
+ })) => Some(prefix.as_ref()),
+ Some(CommentFormat::BlockCommentWithStart(BlockCommentConfig {
+ start: _,
+ end: _,
+ prefix,
+ tab_size,
+ })) => {
+ indent_size.len += tab_size;
+ Some(prefix.as_ref())
+ }
+ None => None,
+ };
+ let indent_prefix = indent_size.chars().collect::<String>();
+ let line_prefix = format!("{indent_prefix}{}", line_prefix.unwrap_or(""));
+
+ 'expand_upwards: while start_row > 0 {
+ let prev_row = start_row - 1;
+ if buffer.contains_str_at(Point::new(prev_row, 0), &line_prefix)
+ && buffer.line_len(MultiBufferRow(prev_row)) as usize > line_prefix.len()
+ && !buffer.is_line_blank(MultiBufferRow(prev_row))
+ {
+ start_row = prev_row;
+ } else {
+ break 'expand_upwards;
+ }
+ }
+
+ 'expand_downwards: while end_row < buffer.max_point().row {
+ let next_row = end_row + 1;
+ if buffer.contains_str_at(Point::new(next_row, 0), &line_prefix)
+ && buffer.line_len(MultiBufferRow(next_row)) as usize > line_prefix.len()
+ && !buffer.is_line_blank(MultiBufferRow(next_row))
+ {
+ end_row = next_row;
+ } else {
+ break 'expand_downwards;
+ }
+ }
+ }
+
+ let mut non_blank_rows_iter = (start_row..=end_row)
+ .filter(|row| !buffer.is_line_blank(MultiBufferRow(*row)))
+ .peekable();
+
+ let first_row = if let Some(&row) = non_blank_rows_iter.peek() {
+ row
+ } else {
+ return Vec::new();
+ };
+
let mut ranges = Vec::new();
- let from_empty_selection = selection.is_empty();
let mut current_range_start = first_row;
let mut prev_row = first_row;
@@ -13241,7 +13579,6 @@ impl Editor {
current_range_indent,
current_range_comment_delimiters.clone(),
current_range_rewrap_prefix.clone(),
- from_empty_selection,
));
current_range_start = row;
current_range_indent = row_indent;
@@ -13258,7 +13595,6 @@ impl Editor {
current_range_indent,
current_range_comment_delimiters,
current_range_rewrap_prefix,
- from_empty_selection,
));
ranges
@@ -13267,17 +13603,11 @@ impl Editor {
let mut edits = Vec::new();
let mut rewrapped_row_ranges = Vec::<RangeInclusive<u32>>::new();
- for (
- language_settings,
- wrap_range,
- mut indent_size,
- comment_prefix,
- rewrap_prefix,
- from_empty_selection,
- ) in wrap_ranges
+ for (language_settings, wrap_range, mut indent_size, comment_prefix, rewrap_prefix) in
+ wrap_ranges
{
- let mut start_row = wrap_range.start.row;
- let mut end_row = wrap_range.end.row;
+ let start_row = wrap_range.start.row;
+ let end_row = wrap_range.end.row;
// Skip selections that overlap with a range that has already been rewrapped.
let selection_range = start_row..end_row;
@@ -13324,32 +13654,6 @@ impl Editor {
continue;
}
- if from_empty_selection {
- 'expand_upwards: while start_row > 0 {
- let prev_row = start_row - 1;
- if buffer.contains_str_at(Point::new(prev_row, 0), &line_prefix)
- && buffer.line_len(MultiBufferRow(prev_row)) as usize > line_prefix.len()
- && !buffer.is_line_blank(MultiBufferRow(prev_row))
- {
- start_row = prev_row;
- } else {
- break 'expand_upwards;
- }
- }
-
- 'expand_downwards: while end_row < buffer.max_point().row {
- let next_row = end_row + 1;
- if buffer.contains_str_at(Point::new(next_row, 0), &line_prefix)
- && buffer.line_len(MultiBufferRow(next_row)) as usize > line_prefix.len()
- && !buffer.is_line_blank(MultiBufferRow(next_row))
- {
- end_row = next_row;
- } else {
- break 'expand_downwards;
- }
- }
- }
-
let start = Point::new(start_row, 0);
let start_offset = ToOffset::to_offset(&start, &buffer);
let end = Point::new(end_row, buffer.line_len(MultiBufferRow(end_row)));
@@ -26,7 +26,7 @@ use language::{
BracketPairConfig,
Capability::ReadWrite,
DiagnosticSourceKind, FakeLspAdapter, IndentGuideSettings, LanguageConfig,
- LanguageConfigOverride, LanguageMatcher, LanguageName, Override, Point,
+ LanguageConfigOverride, LanguageMatcher, LanguageName, LanguageQueries, Override, Point,
language_settings::{
CompletionSettingsContent, FormatterList, LanguageSettingsContent, LspInsertMode,
},
@@ -51,6 +51,7 @@ use settings::{
IndentGuideBackgroundColoring, IndentGuideColoring, InlayHintSettingsContent,
ProjectSettingsContent, SearchSettingsContent, SettingsContent, SettingsStore,
};
+use std::borrow::Cow;
use std::{cell::RefCell, future::Future, rc::Rc, sync::atomic::AtomicBool, time::Instant};
use std::{
iter,
@@ -319,6 +320,71 @@ fn test_undo_redo_with_selection_restoration(cx: &mut TestAppContext) {
});
}
+#[gpui::test]
+fn test_accessibility_keyboard_word_completion(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ // Simulates the macOS Accessibility Keyboard word completion panel, which calls
+ // insertText:replacementRange: to commit a completion. macOS sends two calls per
+ // completion: one with a non-empty range replacing the typed prefix, and one with
+ // an empty replacement range (cursor..cursor) to append a trailing space.
+
+ cx.add_window(|window, cx| {
+ let buffer = MultiBuffer::build_simple("ab", cx);
+ let mut editor = build_editor(buffer, window, cx);
+
+ // Cursor is after the 2-char prefix "ab" at offset 2.
+ editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
+ s.select_ranges([MultiBufferOffset(2)..MultiBufferOffset(2)])
+ });
+
+ // macOS completes "about" by replacing the prefix via range 0..2.
+ editor.replace_text_in_range(Some(0..2), "about", window, cx);
+ assert_eq!(editor.text(cx), "about");
+
+ // macOS sends a trailing space as an empty replacement range (cursor..cursor).
+ // Must insert at the cursor position, not call backspace first (which would
+ // delete the preceding character).
+ editor.replace_text_in_range(Some(5..5), " ", window, cx);
+ assert_eq!(editor.text(cx), "about ");
+
+ editor
+ });
+
+ // Multi-cursor: the replacement must fan out to all cursors, and the trailing
+ // space must land at each cursor's actual current position. After the first
+ // completion, macOS's reported cursor offset is stale (it doesn't account for
+ // the offset shift caused by the other cursor's insertion), so the empty
+ // replacement range must be ignored and the space inserted at each real cursor.
+ cx.add_window(|window, cx| {
+ // Two cursors, each after a 2-char prefix "ab" at the end of each line:
+ // "ab\nab" — cursors at offsets 2 and 5.
+ let buffer = MultiBuffer::build_simple("ab\nab", cx);
+ let mut editor = build_editor(buffer, window, cx);
+
+ editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
+ s.select_ranges([
+ MultiBufferOffset(2)..MultiBufferOffset(2),
+ MultiBufferOffset(5)..MultiBufferOffset(5),
+ ])
+ });
+
+ // macOS reports the newest cursor (offset 5) and sends range 3..5 to
+ // replace its 2-char prefix. selection_replacement_ranges applies the same
+ // delta to fan out to both cursors: 0..2 and 3..5.
+ editor.replace_text_in_range(Some(3..5), "about", window, cx);
+ assert_eq!(editor.text(cx), "about\nabout");
+
+ // Trailing space via empty range. macOS thinks the cursor is at offset 10
+ // (5 - 2 + 7 = 10), but the actual cursors are at 5 and 11. The stale
+ // offset must be ignored and the space inserted at each real cursor position.
+ editor.replace_text_in_range(Some(10..10), " ", window, cx);
+ assert_eq!(editor.text(cx), "about \nabout ");
+
+ editor
+ });
+}
+
#[gpui::test]
fn test_ime_composition(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@@ -1324,6 +1390,105 @@ fn test_fold_action_multiple_line_breaks(cx: &mut TestAppContext) {
});
}
+#[gpui::test]
+async fn test_fold_with_unindented_multiline_raw_string(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ let mut cx = EditorTestContext::new(cx).await;
+
+ let language = Arc::new(
+ Language::new(
+ LanguageConfig::default(),
+ Some(tree_sitter_rust::LANGUAGE.into()),
+ )
+ .with_queries(LanguageQueries {
+ overrides: Some(Cow::from(indoc! {"
+ [
+ (string_literal)
+ (raw_string_literal)
+ ] @string
+ [
+ (line_comment)
+ (block_comment)
+ ] @comment.inclusive
+ "})),
+ ..Default::default()
+ })
+ .expect("Could not parse queries"),
+ );
+
+ cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx));
+ cx.set_state(indoc! {"
+ fn main() {
+ let s = r#\"
+ a
+ b
+ c
+ \"#;
+ }ˇ
+ "});
+
+ cx.update_editor(|editor, window, cx| {
+ editor.fold_at_level(&FoldAtLevel(1), window, cx);
+ assert_eq!(
+ editor.display_text(cx),
+ indoc! {"
+ fn main() {⋯
+ }
+ "},
+ );
+ });
+}
+
+#[gpui::test]
+async fn test_fold_with_unindented_multiline_block_comment(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ let mut cx = EditorTestContext::new(cx).await;
+
+ let language = Arc::new(
+ Language::new(
+ LanguageConfig::default(),
+ Some(tree_sitter_rust::LANGUAGE.into()),
+ )
+ .with_queries(LanguageQueries {
+ overrides: Some(Cow::from(indoc! {"
+ [
+ (string_literal)
+ (raw_string_literal)
+ ] @string
+ [
+ (line_comment)
+ (block_comment)
+ ] @comment.inclusive
+ "})),
+ ..Default::default()
+ })
+ .expect("Could not parse queries"),
+ );
+
+ cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx));
+ cx.set_state(indoc! {"
+ fn main() {
+ let x = 1;
+ /*
+ unindented comment line
+ */
+ }ˇ
+ "});
+
+ cx.update_editor(|editor, window, cx| {
+ editor.fold_at_level(&FoldAtLevel(1), window, cx);
+ assert_eq!(
+ editor.display_text(cx),
+ indoc! {"
+ fn main() {⋯
+ }
+ "},
+ );
+ });
+}
+
#[gpui::test]
fn test_fold_at_level(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@@ -7297,6 +7462,48 @@ async fn test_rewrap(cx: &mut TestAppContext) {
also very long and should not merge
with the numbered item.ˇ»
"},
+ markdown_language.clone(),
+ &mut cx,
+ );
+
+ // Test that empty selection rewrap on a numbered list item does not merge adjacent items
+ assert_rewrap(
+ indoc! {"
+ 1. This is the first numbered list item that is very long and needs to be wrapped properly.
+ 2. ˇThis is the second numbered list item that is also very long and needs to be wrapped.
+ 3. This is the third numbered list item, shorter.
+ "},
+ indoc! {"
+ 1. This is the first numbered list item
+ that is very long and needs to be
+ wrapped properly.
+ 2. ˇThis is the second numbered list item
+ that is also very long and needs to
+ be wrapped.
+ 3. This is the third numbered list item,
+ shorter.
+ "},
+ markdown_language.clone(),
+ &mut cx,
+ );
+
+ // Test that empty selection rewrap on a bullet list item does not merge adjacent items
+ assert_rewrap(
+ indoc! {"
+ - This is the first bullet item that is very long and needs wrapping properly here.
+ - ˇThis is the second bullet item that is also very long and needs to be wrapped.
+ - This is the third bullet item, shorter.
+ "},
+ indoc! {"
+ - This is the first bullet item that is
+ very long and needs wrapping properly
+ here.
+ - ˇThis is the second bullet item that is
+ also very long and needs to be
+ wrapped.
+ - This is the third bullet item,
+ shorter.
+ "},
markdown_language,
&mut cx,
);
@@ -9446,6 +9653,28 @@ async fn test_add_selection_above_below_multi_cursor_existing_state(cx: &mut Tes
));
}
+#[gpui::test]
+async fn test_add_selection_above_below_multibyte(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+ let mut cx = EditorTestContext::new(cx).await;
+
+ // Cursor after "Häl" (byte column 4, char column 3) should align to
+ // char column 3 on the ASCII line below, not byte column 4.
+ cx.set_state(indoc!(
+ r#"Hälˇlö
+ Hallo"#
+ ));
+
+ cx.update_editor(|editor, window, cx| {
+ editor.add_selection_below(&Default::default(), window, cx);
+ });
+
+ cx.assert_editor_state(indoc!(
+ r#"Hälˇlö
+ Halˇlo"#
+ ));
+}
+
#[gpui::test]
async fn test_select_next(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@@ -24698,6 +24927,163 @@ async fn test_goto_definition_no_fallback(cx: &mut TestAppContext) {
});
}
+#[gpui::test]
+async fn test_goto_definition_close_ranges_open_singleton(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+ let mut cx = EditorLspTestContext::new_rust(
+ lsp::ServerCapabilities {
+ definition_provider: Some(lsp::OneOf::Left(true)),
+ ..lsp::ServerCapabilities::default()
+ },
+ cx,
+ )
+ .await;
+
+    // File content: functions are defined on lines 3, 5, and 7 (0-indexed).
+ // With the default excerpt_context_lines of 2, ranges that are within
+ // 2 * 2 = 4 rows of each other should be grouped into one excerpt.
+ cx.set_state(
+ &r#"fn caller() {
+ let _ = ˇtarget();
+ }
+ fn target_a() {}
+
+ fn target_b() {}
+
+ fn target_c() {}
+ "#
+ .unindent(),
+ );
+
+ // Return two definitions that are close together (lines 3 and 5, gap of 2 rows)
+ cx.set_request_handler::<lsp::request::GotoDefinition, _, _>(move |url, _, _| async move {
+ Ok(Some(lsp::GotoDefinitionResponse::Array(vec![
+ lsp::Location {
+ uri: url.clone(),
+ range: lsp::Range::new(lsp::Position::new(3, 3), lsp::Position::new(3, 11)),
+ },
+ lsp::Location {
+ uri: url,
+ range: lsp::Range::new(lsp::Position::new(5, 3), lsp::Position::new(5, 11)),
+ },
+ ])))
+ });
+
+ let navigated = cx
+ .update_editor(|editor, window, cx| editor.go_to_definition(&GoToDefinition, window, cx))
+ .await
+ .expect("Failed to navigate to definitions");
+ assert_eq!(navigated, Navigated::Yes);
+
+ let editors = cx.update_workspace(|workspace, _, cx| {
+ workspace.items_of_type::<Editor>(cx).collect::<Vec<_>>()
+ });
+ cx.update_editor(|_, _, _| {
+ assert_eq!(
+ editors.len(),
+ 1,
+ "Close ranges should navigate in-place without opening a new editor"
+ );
+ });
+
+ // Both target ranges should be selected
+ cx.assert_editor_state(
+ &r#"fn caller() {
+ let _ = target();
+ }
+ fn «target_aˇ»() {}
+
+ fn «target_bˇ»() {}
+
+ fn target_c() {}
+ "#
+ .unindent(),
+ );
+}
+
+#[gpui::test]
+async fn test_goto_definition_far_ranges_open_multibuffer(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+ let mut cx = EditorLspTestContext::new_rust(
+ lsp::ServerCapabilities {
+ definition_provider: Some(lsp::OneOf::Left(true)),
+ ..lsp::ServerCapabilities::default()
+ },
+ cx,
+ )
+ .await;
+
+ // Create a file with definitions far apart (more than 2 * excerpt_context_lines rows).
+ cx.set_state(
+ &r#"fn caller() {
+ let _ = ˇtarget();
+ }
+ fn target_a() {}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ fn target_b() {}
+ "#
+ .unindent(),
+ );
+
+ // Return two definitions that are far apart (lines 3 and 19, gap of 16 rows)
+ cx.set_request_handler::<lsp::request::GotoDefinition, _, _>(move |url, _, _| async move {
+ Ok(Some(lsp::GotoDefinitionResponse::Array(vec![
+ lsp::Location {
+ uri: url.clone(),
+ range: lsp::Range::new(lsp::Position::new(3, 3), lsp::Position::new(3, 11)),
+ },
+ lsp::Location {
+ uri: url,
+ range: lsp::Range::new(lsp::Position::new(19, 3), lsp::Position::new(19, 11)),
+ },
+ ])))
+ });
+
+ let navigated = cx
+ .update_editor(|editor, window, cx| editor.go_to_definition(&GoToDefinition, window, cx))
+ .await
+ .expect("Failed to navigate to definitions");
+ assert_eq!(navigated, Navigated::Yes);
+
+ let editors = cx.update_workspace(|workspace, _, cx| {
+ workspace.items_of_type::<Editor>(cx).collect::<Vec<_>>()
+ });
+ cx.update_editor(|_, _, test_editor_cx| {
+ assert_eq!(
+ editors.len(),
+ 2,
+ "Far apart ranges should open a new multibuffer editor"
+ );
+ let multibuffer_editor = editors
+ .into_iter()
+ .find(|editor| *editor != test_editor_cx.entity())
+ .expect("Should have a multibuffer editor");
+ let multibuffer_text = multibuffer_editor.read(test_editor_cx).text(test_editor_cx);
+ assert!(
+ multibuffer_text.contains("target_a"),
+ "Multibuffer should contain the first definition"
+ );
+ assert!(
+ multibuffer_text.contains("target_b"),
+ "Multibuffer should contain the second definition"
+ );
+ });
+}
+
#[gpui::test]
async fn test_find_all_references_editor_reuse(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@@ -30471,14 +30857,47 @@ async fn test_end_of_editor_context(cx: &mut TestAppContext) {
cx.set_state("line1\nline2ˇ");
cx.update_editor(|e, window, cx| {
e.set_mode(EditorMode::SingleLine);
+ assert!(!e.key_context(window, cx).contains("start_of_input"));
assert!(e.key_context(window, cx).contains("end_of_input"));
});
cx.set_state("ˇline1\nline2");
cx.update_editor(|e, window, cx| {
+ e.set_mode(EditorMode::SingleLine);
+ assert!(e.key_context(window, cx).contains("start_of_input"));
assert!(!e.key_context(window, cx).contains("end_of_input"));
});
cx.set_state("line1ˇ\nline2");
cx.update_editor(|e, window, cx| {
+ e.set_mode(EditorMode::SingleLine);
+ assert!(!e.key_context(window, cx).contains("start_of_input"));
+ assert!(!e.key_context(window, cx).contains("end_of_input"));
+ });
+
+ cx.set_state("line1\nline2ˇ");
+ cx.update_editor(|e, window, cx| {
+ e.set_mode(EditorMode::AutoHeight {
+ min_lines: 1,
+ max_lines: Some(4),
+ });
+ assert!(!e.key_context(window, cx).contains("start_of_input"));
+ assert!(e.key_context(window, cx).contains("end_of_input"));
+ });
+ cx.set_state("ˇline1\nline2");
+ cx.update_editor(|e, window, cx| {
+ e.set_mode(EditorMode::AutoHeight {
+ min_lines: 1,
+ max_lines: Some(4),
+ });
+ assert!(e.key_context(window, cx).contains("start_of_input"));
+ assert!(!e.key_context(window, cx).contains("end_of_input"));
+ });
+ cx.set_state("line1ˇ\nline2");
+ cx.update_editor(|e, window, cx| {
+ e.set_mode(EditorMode::AutoHeight {
+ min_lines: 1,
+ max_lines: Some(4),
+ });
+ assert!(!e.key_context(window, cx).contains("start_of_input"));
assert!(!e.key_context(window, cx).contains("end_of_input"));
});
}
@@ -30828,7 +31247,7 @@ async fn test_scroll_by_clicking_sticky_header(cx: &mut TestAppContext) {
let fn_foo = || empty_range(0, 0);
let impl_bar = || empty_range(4, 0);
- let fn_new = || empty_range(5, 4);
+ let fn_new = || empty_range(5, 0);
let mut scroll_and_click = |scroll_offset: ScrollOffset, click_offset: ScrollOffset| {
cx.update_editor(|e, window, cx| {
@@ -30914,6 +31333,36 @@ async fn test_scroll_by_clicking_sticky_header(cx: &mut TestAppContext) {
// we don't assert on the visible_range because if we clicked the gutter, our line is fully selected
(gpui::Point { x: 0., y: 1.5 })
);
+
+ // Verify clicking at a specific x position within a sticky header places
+ // the cursor at the corresponding column.
+ let (text_origin_x, em_width) = cx.update_editor(|editor, _, _| {
+ let position_map = editor.last_position_map.as_ref().unwrap();
+ (
+ position_map.text_hitbox.bounds.origin.x,
+ position_map.em_layout_width,
+ )
+ });
+
+ // Click on "impl Bar {" sticky header at column 5 (the 'B' in 'Bar').
+ // The text "impl Bar {" starts at column 0, so column 5 = 'B'.
+ let click_x = text_origin_x + em_width * 5.5;
+ cx.update_editor(|e, window, cx| {
+ e.scroll(gpui::Point { x: 0., y: 4.5 }, None, window, cx);
+ });
+ cx.run_until_parked();
+ cx.simulate_click(
+ gpui::Point {
+ x: click_x,
+ y: 0.25 * line_height,
+ },
+ Modifiers::none(),
+ );
+ cx.run_until_parked();
+ let (scroll_pos, selections) =
+ cx.update_editor(|e, _, cx| (e.scroll_position(cx), display_ranges(e, cx)));
+ assert_eq!(scroll_pos, gpui::Point { x: 0., y: 4. });
+ assert_eq!(selections, vec![empty_range(4, 5)]);
}
#[gpui::test]
@@ -34119,3 +34568,151 @@ async fn test_restore_and_next(cx: &mut TestAppContext) {
.unindent(),
);
}
+
+#[gpui::test]
+async fn test_align_selections(cx: &mut TestAppContext) {
+    // Covers Editor::align_selections for one selection per row: selection
+    // heads on consecutive rows are aligned to a common column by inserting
+    // padding (see the `before`/`after` fixtures for the expected layout).
+    init_test(cx, |_| {});
+    let mut cx = EditorTestContext::new(cx).await;
+
+    // 1) one cursor, no action — with a single selection there is nothing
+    // to align against, so the editor state must be left unchanged.
+    let before = " abc\n abc\nabc\n ˇabc";
+    cx.set_state(before);
+    cx.update_editor(|e, window, cx| e.align_selections(&AlignSelections, window, cx));
+    cx.assert_editor_state(before);
+
+    // 2) multiple cursors at different rows
+    let before = indoc!(
+        r#"
+        let aˇbc = 123;
+        let xˇyz = 456;
+        let fˇoo = 789;
+        let bˇar = 0;
+        "#
+    );
+    let after = indoc!(
+        r#"
+        let a ˇbc = 123;
+        let x ˇyz = 456;
+        let f ˇoo = 789;
+        let bˇar = 0;
+        "#
+    );
+    cx.set_state(before);
+    cx.update_editor(|e, window, cx| e.align_selections(&AlignSelections, window, cx));
+    cx.assert_editor_state(after);
+
+    // 3) multiple selections (non-empty ranges) at different rows
+    let before = indoc!(
+        r#"
+        let «ˇabc» = 123;
+        let «ˇxyz» = 456;
+        let «ˇfoo» = 789;
+        let «ˇbar» = 0;
+        "#
+    );
+    let after = indoc!(
+        r#"
+        let «ˇabc» = 123;
+        let «ˇxyz» = 456;
+        let «ˇfoo» = 789;
+        let «ˇbar» = 0;
+        "#
+    );
+    cx.set_state(before);
+    cx.update_editor(|e, window, cx| e.align_selections(&AlignSelections, window, cx));
+    cx.assert_editor_state(after);
+
+    // 4) multiple selections at different rows with inverted heads (the
+    // cursor sits at the end of each range); the unselected `// comment`
+    // row is left untouched.
+    let before = indoc!(
+        r#"
+        let «abcˇ» = 123;
+        // comment
+        let «xyzˇ» = 456;
+        let «fooˇ» = 789;
+        let «barˇ» = 0;
+        "#
+    );
+    let after = indoc!(
+        r#"
+        let «abcˇ» = 123;
+        // comment
+        let «xyzˇ» = 456;
+        let «fooˇ» = 789;
+        let «barˇ» = 0;
+        "#
+    );
+    cx.set_state(before);
+    cx.update_editor(|e, window, cx| e.align_selections(&AlignSelections, window, cx));
+    cx.assert_editor_state(after);
+}
+
+#[gpui::test]
+async fn test_align_selections_multicolumn(cx: &mut TestAppContext) {
+    // Covers Editor::align_selections when each row carries several
+    // selections: the selections form alignment columns, producing a
+    // table-style layout across the affected rows.
+    init_test(cx, |_| {});
+    let mut cx = EditorTestContext::new(cx).await;
+
+    // 1) Multicolumn, with one unaffected editor row: the row that carries
+    // no selection must be left untouched.
+    let before = indoc!(
+        r#"
+        name «|ˇ» age «|ˇ» height «|ˇ» note
+        Matthew «|ˇ» 7 «|ˇ» 2333 «|ˇ» smart
+        Mike «|ˇ» 1234 «|ˇ» 567 «|ˇ» lazy
+        Anything that is not selected
+        Miles «|ˇ» 88 «|ˇ» 99 «|ˇ» funny
+        "#
+    );
+    let after = indoc!(
+        r#"
+        name «|ˇ» age «|ˇ» height «|ˇ» note
+        Matthew «|ˇ» 7 «|ˇ» 2333 «|ˇ» smart
+        Mike «|ˇ» 1234 «|ˇ» 567 «|ˇ» lazy
+        Anything that is not selected
+        Miles «|ˇ» 88 «|ˇ» 99 «|ˇ» funny
+        "#
+    );
+    cx.set_state(before);
+    cx.update_editor(|e, window, cx| e.align_selections(&AlignSelections, window, cx));
+    cx.assert_editor_state(after);
+
+    // 2) not every alignment row has the same number of alignment columns
+    // (the `Mike` row is missing the last column).
+    let before = indoc!(
+        r#"
+        name «|ˇ» age «|ˇ» height
+        Matthew «|ˇ» 7 «|ˇ» 2333
+        Mike «|ˇ» 1234
+        Miles «|ˇ» 88 «|ˇ» 99
+        "#
+    );
+    let after = indoc!(
+        r#"
+        name «|ˇ» age «|ˇ» height
+        Matthew «|ˇ» 7 «|ˇ» 2333
+        Mike «|ˇ» 1234
+        Miles «|ˇ» 88 «|ˇ» 99
+        "#
+    );
+    cx.set_state(before);
+    cx.update_editor(|e, window, cx| e.align_selections(&AlignSelections, window, cx));
+    cx.assert_editor_state(after);
+
+    // 3) a column that is already aligned stays aligned.
+    let before = indoc!(
+        r#"
+        $ ˇa ˇa
+        $ ˇa ˇa
+        $ ˇa ˇa
+        $ ˇa ˇa
+        "#
+    );
+    let after = indoc!(
+        r#"
+        $ ˇa ˇa
+        $ ˇa ˇa
+        $ ˇa ˇa
+        $ ˇa ˇa
+        "#
+    );
+    cx.set_state(before);
+    cx.update_editor(|e, window, cx| e.align_selections(&AlignSelections, window, cx));
+    cx.assert_editor_state(after);
+}
@@ -43,13 +43,12 @@ use gpui::{
Bounds, ClickEvent, ClipboardItem, ContentMask, Context, Corner, Corners, CursorStyle,
DispatchPhase, Edges, Element, ElementInputHandler, Entity, Focusable as _, Font, FontId,
FontWeight, GlobalElementId, Hitbox, HitboxBehavior, Hsla, InteractiveElement, IntoElement,
- IsZero, KeybindingKeystroke, Length, Modifiers, ModifiersChangedEvent, MouseButton,
- MouseClickEvent, MouseDownEvent, MouseMoveEvent, MousePressureEvent, MouseUpEvent, PaintQuad,
- ParentElement, Pixels, PressureStage, ScrollDelta, ScrollHandle, ScrollWheelEvent, ShapedLine,
- SharedString, Size, StatefulInteractiveElement, Style, Styled, StyledText, TextAlign, TextRun,
- TextStyleRefinement, WeakEntity, Window, anchored, deferred, div, fill, linear_color_stop,
- linear_gradient, outline, pattern_slash, point, px, quad, relative, size, solid_background,
- transparent_black,
+ IsZero, Length, Modifiers, ModifiersChangedEvent, MouseButton, MouseClickEvent, MouseDownEvent,
+ MouseMoveEvent, MousePressureEvent, MouseUpEvent, PaintQuad, ParentElement, Pixels,
+ PressureStage, ScrollDelta, ScrollHandle, ScrollWheelEvent, ShapedLine, SharedString, Size,
+ StatefulInteractiveElement, Style, Styled, StyledText, TextAlign, TextRun, TextStyleRefinement,
+ WeakEntity, Window, anchored, deferred, div, fill, linear_color_stop, linear_gradient, outline,
+ pattern_slash, point, px, quad, relative, size, solid_background, transparent_black,
};
use itertools::Itertools;
use language::{HighlightedText, IndentGuideSettings, language_settings::ShowWhitespaceSetting};
@@ -59,8 +58,6 @@ use multi_buffer::{
MultiBufferRow, RowInfo,
};
-use edit_prediction_types::EditPredictionGranularity;
-
use project::{
DisableAiSettings, Entry, ProjectPath,
debugger::breakpoint_store::{Breakpoint, BreakpointSessionState},
@@ -653,6 +650,7 @@ impl EditorElement {
register_action(editor, window, Editor::enable_breakpoint);
register_action(editor, window, Editor::disable_breakpoint);
register_action(editor, window, Editor::toggle_read_only);
+ register_action(editor, window, Editor::align_selections);
if editor.read(cx).enable_wrap_selections_in_tag(cx) {
register_action(editor, window, Editor::wrap_selections_in_tag);
}
@@ -4599,7 +4597,6 @@ impl EditorElement {
let mut lines = Vec::<StickyHeaderLine>::new();
for StickyHeader {
- item,
sticky_row,
start_point,
offset,
@@ -4639,7 +4636,6 @@ impl EditorElement {
line_height * offset as f32,
line,
line_number,
- item.range.start,
line_height,
scroll_pixel_position,
content_origin,
@@ -4705,7 +4701,6 @@ impl EditorElement {
end_rows.push(end_row);
rows.push(StickyHeader {
- item: item.clone(),
sticky_row,
start_point,
offset,
@@ -4837,17 +4832,11 @@ impl EditorElement {
let edit_prediction = if edit_prediction_popover_visible {
self.editor.update(cx, move |editor, cx| {
- let accept_binding = editor.accept_edit_prediction_keybind(
- EditPredictionGranularity::Full,
- window,
- cx,
- );
let mut element = editor.render_edit_prediction_cursor_popover(
min_width,
max_width,
cursor_point,
style,
- accept_binding.keystroke(),
window,
cx,
)?;
@@ -6709,22 +6698,33 @@ impl EditorElement {
}
});
+ let position_map = layout.position_map.clone();
+
for (line_index, line) in sticky_headers.lines.iter().enumerate() {
let editor = self.editor.clone();
let hitbox = line.hitbox.clone();
- let target_anchor = line.target_anchor;
+ let row = line.row;
+ let line_layout = line.line.clone();
+ let position_map = position_map.clone();
window.on_mouse_event(move |event: &MouseDownEvent, phase, window, cx| {
if !phase.bubble() {
return;
}
if event.button == MouseButton::Left && hitbox.is_hovered(window) {
+ let point_for_position =
+ position_map.point_for_position_on_line(event.position, row, &line_layout);
+
editor.update(cx, |editor, cx| {
+ let snapshot = editor.snapshot(window, cx);
+ let anchor = snapshot
+ .display_snapshot
+ .display_point_to_anchor(point_for_position.previous_valid, Bias::Left);
editor.change_selections(
SelectionEffects::scroll(Autoscroll::top_relative(line_index)),
window,
cx,
- |selections| selections.select_ranges([target_anchor..target_anchor]),
+ |selections| selections.select_ranges([anchor..anchor]),
);
cx.stop_propagation();
});
@@ -8617,21 +8617,6 @@ pub(crate) fn render_buffer_header(
})
}
-pub struct AcceptEditPredictionBinding(pub(crate) Option<gpui::KeyBinding>);
-
-impl AcceptEditPredictionBinding {
- pub fn keystroke(&self) -> Option<&KeybindingKeystroke> {
- if let Some(binding) = self.0.as_ref() {
- match &binding.keystrokes() {
- [keystroke, ..] => Some(keystroke),
- _ => None,
- }
- } else {
- None
- }
- }
-}
-
fn prepaint_gutter_button(
mut button: AnyElement,
row: DisplayRow,
@@ -9546,7 +9531,7 @@ impl EditorRequestLayoutState {
}
}
- fn can_prepaint(&self) -> bool {
+ fn has_remaining_prepaint_depth(&self) -> bool {
self.prepaint_depth.get() < Self::MAX_PREPAINT_DEPTH
}
}
@@ -10259,29 +10244,21 @@ impl Element for EditorElement {
}
})
});
- if new_renderer_widths.is_some_and(|new_renderer_widths| {
- self.editor.update(cx, |editor, cx| {
- editor.update_renderer_widths(new_renderer_widths, cx)
- })
- }) {
- // If the fold widths have changed, we need to prepaint
- // the element again to account for any changes in
- // wrapping.
- if request_layout.can_prepaint() {
- return self.prepaint(
- None,
- _inspector_id,
- bounds,
- request_layout,
- window,
- cx,
- );
- } else {
- debug_panic!(concat!(
- "skipping recursive prepaint at max depth. ",
- "renderer widths may be stale."
- ));
- }
+ let renderer_widths_changed = request_layout.has_remaining_prepaint_depth()
+ && new_renderer_widths.is_some_and(|new_renderer_widths| {
+ self.editor.update(cx, |editor, cx| {
+ editor.update_renderer_widths(new_renderer_widths, cx)
+ })
+ });
+ if renderer_widths_changed {
+ return self.prepaint(
+ None,
+ _inspector_id,
+ bounds,
+ request_layout,
+ window,
+ cx,
+ );
}
let longest_line_blame_width = self
@@ -10397,14 +10374,14 @@ impl Element for EditorElement {
resized_blocks,
} = blocks;
if let Some(resized_blocks) = resized_blocks {
- self.editor.update(cx, |editor, cx| {
- editor.resize_blocks(
- resized_blocks,
- autoscroll_request.map(|(autoscroll, _)| autoscroll),
- cx,
- )
- });
- if request_layout.can_prepaint() {
+ if request_layout.has_remaining_prepaint_depth() {
+ self.editor.update(cx, |editor, cx| {
+ editor.resize_blocks(
+ resized_blocks,
+ autoscroll_request.map(|(autoscroll, _)| autoscroll),
+ cx,
+ )
+ });
return self.prepaint(
None,
_inspector_id,
@@ -10414,10 +10391,10 @@ impl Element for EditorElement {
cx,
);
} else {
- debug_panic!(concat!(
- "skipping recursive prepaint at max depth. ",
- "block layout may be stale."
- ));
+ debug_panic!(
+ "dropping block resize because prepaint depth \
+ limit was reached"
+ );
}
}
@@ -11292,11 +11269,10 @@ struct StickyHeaders {
struct StickyHeaderLine {
row: DisplayRow,
offset: Pixels,
- line: LineWithInvisibles,
+ line: Rc<LineWithInvisibles>,
line_number: Option<ShapedLine>,
elements: SmallVec<[AnyElement; 1]>,
available_text_width: Pixels,
- target_anchor: Anchor,
hitbox: Hitbox,
}
@@ -11354,7 +11330,7 @@ impl StickyHeaders {
},
);
- window.set_cursor_style(CursorStyle::PointingHand, &line.hitbox);
+ window.set_cursor_style(CursorStyle::IBeam, &line.hitbox);
}
}
}
@@ -11365,7 +11341,6 @@ impl StickyHeaderLine {
offset: Pixels,
mut line: LineWithInvisibles,
line_number: Option<ShapedLine>,
- target_anchor: Anchor,
line_height: Pixels,
scroll_pixel_position: gpui::Point<ScrollPixelOffset>,
content_origin: gpui::Point<Pixels>,
@@ -11395,11 +11370,10 @@ impl StickyHeaderLine {
Self {
row,
offset,
- line,
+ line: Rc::new(line),
line_number,
elements,
available_text_width,
- target_anchor,
hitbox: window.insert_hitbox(hitbox_bounds, HitboxBehavior::BlockMouseExceptScroll),
}
}
@@ -11981,6 +11955,41 @@ impl PositionMap {
column_overshoot_after_line_end,
}
}
+
+    /// Maps a window-space `position` to a `PointForPosition` on the given
+    /// display `row`, hit-testing against the provided shaped `line` (e.g.
+    /// a sticky-header line) rather than the lines cached in this map.
+    ///
+    /// The x coordinate is translated into text-local space — accounting
+    /// for the text hitbox origin, horizontal scroll, and text alignment —
+    /// then resolved to a column via the line layout. Clicks past the end
+    /// of the line are reported through `column_overshoot_after_line_end`.
+    fn point_for_position_on_line(
+        &self,
+        position: gpui::Point<Pixels>,
+        row: DisplayRow,
+        line: &LineWithInvisibles,
+    ) -> PointForPosition {
+        let text_bounds = self.text_hitbox.bounds;
+        let scroll_position = self.snapshot.scroll_position();
+        // Translate from window space into the text area, then add the
+        // horizontal scroll offset (columns scrolled × em width) in pixels.
+        let position = position - text_bounds.origin;
+        let x = position.x + (scroll_position.x as f32 * self.em_layout_width);
+
+        // Account for the line's alignment offset (e.g. centered/right
+        // aligned text) before hit-testing the shaped line.
+        let alignment_offset = line.alignment_offset(self.text_align, self.content_width);
+        let x_relative_to_text = x - alignment_offset;
+        // Resolve x to a column within the line; if x falls past the end of
+        // the line, clamp to the line length and record the pixel overshoot.
+        let (column, x_overshoot_after_line_end) =
+            if let Some(ix) = line.index_for_x(x_relative_to_text) {
+                (ix as u32, px(0.))
+            } else {
+                (line.len as u32, px(0.).max(x_relative_to_text - line.width))
+            };
+
+        let mut exact_unclipped = DisplayPoint::new(row, column);
+        // Clip to the nearest valid display points on either side of the
+        // raw (unclipped) point.
+        let previous_valid = self.snapshot.clip_point(exact_unclipped, Bias::Left);
+        let next_valid = self.snapshot.clip_point(exact_unclipped, Bias::Right);
+
+        // Convert the pixel overshoot into whole columns using the em width.
+        let column_overshoot_after_line_end =
+            (x_overshoot_after_line_end / self.em_layout_width) as u32;
+        *exact_unclipped.column_mut() += column_overshoot_after_line_end;
+        PointForPosition {
+            previous_valid,
+            next_valid,
+            exact_unclipped,
+            column_overshoot_after_line_end,
+        }
+    }
}
pub(crate) struct BlockLayout {
@@ -12317,7 +12326,6 @@ impl HighlightedRange {
}
pub(crate) struct StickyHeader {
- pub item: language::OutlineItem<Anchor>,
pub sticky_row: DisplayRow,
pub start_point: Point,
pub offset: ScrollOffset,
@@ -4,7 +4,7 @@ use crate::{
NavigationData, ReportEditorEvent, SelectionEffects, ToPoint as _,
display_map::HighlightKey,
editor_settings::SeedQuerySetting,
- persistence::{DB, SerializedEditor},
+ persistence::{EditorDb, SerializedEditor},
scroll::{ScrollAnchor, ScrollOffset},
};
use anyhow::{Context as _, Result, anyhow};
@@ -1135,7 +1135,13 @@ impl SerializableItem for Editor {
_window: &mut Window,
cx: &mut App,
) -> Task<Result<()>> {
- workspace::delete_unloaded_items(alive_items, workspace_id, "editors", &DB, cx)
+ workspace::delete_unloaded_items(
+ alive_items,
+ workspace_id,
+ "editors",
+ &EditorDb::global(cx),
+ cx,
+ )
}
fn deserialize(
@@ -1146,7 +1152,7 @@ impl SerializableItem for Editor {
window: &mut Window,
cx: &mut App,
) -> Task<Result<Entity<Self>>> {
- let serialized_editor = match DB
+ let serialized_editor = match EditorDb::global(cx)
.get_serialized_editor(item_id, workspace_id)
.context("Failed to query editor state")
{
@@ -1361,6 +1367,7 @@ impl SerializableItem for Editor {
let snapshot = buffer.read(cx).snapshot();
+ let db = EditorDb::global(cx);
Some(cx.spawn_in(window, async move |_this, cx| {
cx.background_spawn(async move {
let (contents, language) = if serialize_dirty_buffers && is_dirty {
@@ -1378,7 +1385,7 @@ impl SerializableItem for Editor {
mtime,
};
log::debug!("Serializing editor {item_id:?} in workspace {workspace_id:?}");
- DB.save_serialized_editor(item_id, workspace_id, editor)
+ db.save_serialized_editor(item_id, workspace_id, editor)
.await
.context("failed to save serialized editor")
})
@@ -1638,14 +1645,9 @@ impl SearchableItem for Editor {
match setting {
SeedQuerySetting::Never => String::new(),
SeedQuerySetting::Selection | SeedQuerySetting::Always if !selection.is_empty() => {
- let text: String = buffer_snapshot
+ buffer_snapshot
.text_for_range(selection.start..selection.end)
- .collect();
- if text.contains('\n') {
- String::new()
- } else {
- text
- }
+ .collect()
}
SeedQuerySetting::Selection => String::new(),
SeedQuerySetting::Always => {
@@ -2110,7 +2112,9 @@ mod tests {
MultiWorkspace::test_new(project.clone(), window, cx)
});
let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
- let workspace_id = workspace::WORKSPACE_DB.next_id().await.unwrap();
+ let db = cx.update(|_, cx| workspace::WorkspaceDb::global(cx));
+ let workspace_id = db.next_id().await.unwrap();
+ let editor_db = cx.update(|_, cx| EditorDb::global(cx));
let item_id = 1234 as ItemId;
let mtime = fs
.metadata(Path::new(path!("/file.rs")))
@@ -2126,7 +2130,8 @@ mod tests {
mtime: Some(mtime),
};
- DB.save_serialized_editor(item_id, workspace_id, serialized_editor.clone())
+ editor_db
+ .save_serialized_editor(item_id, workspace_id, serialized_editor.clone())
.await
.unwrap();
@@ -2149,8 +2154,10 @@ mod tests {
MultiWorkspace::test_new(project.clone(), window, cx)
});
let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+ let db = cx.update(|_, cx| workspace::WorkspaceDb::global(cx));
+ let editor_db = cx.update(|_, cx| EditorDb::global(cx));
- let workspace_id = workspace::WORKSPACE_DB.next_id().await.unwrap();
+ let workspace_id = db.next_id().await.unwrap();
let item_id = 5678 as ItemId;
let serialized_editor = SerializedEditor {
@@ -2160,7 +2167,8 @@ mod tests {
mtime: None,
};
- DB.save_serialized_editor(item_id, workspace_id, serialized_editor)
+ editor_db
+ .save_serialized_editor(item_id, workspace_id, serialized_editor)
.await
.unwrap();
@@ -2189,8 +2197,10 @@ mod tests {
MultiWorkspace::test_new(project.clone(), window, cx)
});
let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+ let db = cx.update(|_, cx| workspace::WorkspaceDb::global(cx));
+ let editor_db = cx.update(|_, cx| EditorDb::global(cx));
- let workspace_id = workspace::WORKSPACE_DB.next_id().await.unwrap();
+ let workspace_id = db.next_id().await.unwrap();
let item_id = 9012 as ItemId;
let serialized_editor = SerializedEditor {
@@ -2200,7 +2210,8 @@ mod tests {
mtime: None,
};
- DB.save_serialized_editor(item_id, workspace_id, serialized_editor)
+ editor_db
+ .save_serialized_editor(item_id, workspace_id, serialized_editor)
.await
.unwrap();
@@ -2227,8 +2238,10 @@ mod tests {
MultiWorkspace::test_new(project.clone(), window, cx)
});
let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+ let db = cx.update(|_, cx| workspace::WorkspaceDb::global(cx));
+ let editor_db = cx.update(|_, cx| EditorDb::global(cx));
- let workspace_id = workspace::WORKSPACE_DB.next_id().await.unwrap();
+ let workspace_id = db.next_id().await.unwrap();
let item_id = 9345 as ItemId;
let old_mtime = MTime::from_seconds_and_nanos(0, 50);
@@ -2239,7 +2252,8 @@ mod tests {
mtime: Some(old_mtime),
};
- DB.save_serialized_editor(item_id, workspace_id, serialized_editor)
+ editor_db
+ .save_serialized_editor(item_id, workspace_id, serialized_editor)
.await
.unwrap();
@@ -2259,8 +2273,10 @@ mod tests {
MultiWorkspace::test_new(project.clone(), window, cx)
});
let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+ let db = cx.update(|_, cx| workspace::WorkspaceDb::global(cx));
+ let editor_db = cx.update(|_, cx| EditorDb::global(cx));
- let workspace_id = workspace::WORKSPACE_DB.next_id().await.unwrap();
+ let workspace_id = db.next_id().await.unwrap();
let item_id = 10000 as ItemId;
let serialized_editor = SerializedEditor {
@@ -2270,7 +2286,8 @@ mod tests {
mtime: None,
};
- DB.save_serialized_editor(item_id, workspace_id, serialized_editor)
+ editor_db
+ .save_serialized_editor(item_id, workspace_id, serialized_editor)
.await
.unwrap();
@@ -2301,8 +2318,10 @@ mod tests {
MultiWorkspace::test_new(project.clone(), window, cx)
});
let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+ let db = cx.update(|_, cx| workspace::WorkspaceDb::global(cx));
+ let editor_db = cx.update(|_, cx| EditorDb::global(cx));
- let workspace_id = workspace::WORKSPACE_DB.next_id().await.unwrap();
+ let workspace_id = db.next_id().await.unwrap();
let item_id = 11000 as ItemId;
let mtime = fs
@@ -2320,7 +2339,8 @@ mod tests {
mtime: Some(mtime),
};
- DB.save_serialized_editor(item_id, workspace_id, serialized_editor)
+ editor_db
+ .save_serialized_editor(item_id, workspace_id, serialized_editor)
.await
.unwrap();
@@ -2357,8 +2377,10 @@ mod tests {
let (multi_workspace, cx) =
cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+ let db = cx.update(|_, cx| workspace::WorkspaceDb::global(cx));
+ let editor_db = cx.update(|_, cx| EditorDb::global(cx));
- let workspace_id = workspace::WORKSPACE_DB.next_id().await.unwrap();
+ let workspace_id = db.next_id().await.unwrap();
let item_id = 99999 as ItemId;
let serialized_editor = SerializedEditor {
@@ -2368,7 +2390,8 @@ mod tests {
mtime: None,
};
- DB.save_serialized_editor(item_id, workspace_id, serialized_editor)
+ editor_db
+ .save_serialized_editor(item_id, workspace_id, serialized_editor)
.await
.unwrap();
@@ -286,13 +286,7 @@ pub fn deploy_context_menu(
.separator()
.action_disabled_when(
!has_reveal_target,
- if cfg!(target_os = "macos") {
- "Reveal in Finder"
- } else if cfg!(target_os = "windows") {
- "Reveal in File Explorer"
- } else {
- "Reveal in File Manager"
- },
+ ui::utils::reveal_in_file_manager_label(false),
Box::new(RevealInFileManager),
)
.when(is_markdown, |builder| {
@@ -226,7 +226,7 @@ impl Domain for EditorDb {
];
}
-db::static_connection!(DB, EditorDb, [WorkspaceDb]);
+db::static_connection!(EditorDb, [WorkspaceDb]);
// https://www.sqlite.org/limits.html
// > <..> the maximum value of a host parameter number is SQLITE_MAX_VARIABLE_NUMBER,
@@ -415,8 +415,10 @@ mod tests {
use super::*;
#[gpui::test]
- async fn test_save_and_get_serialized_editor() {
- let workspace_id = workspace::WORKSPACE_DB.next_id().await.unwrap();
+ async fn test_save_and_get_serialized_editor(cx: &mut gpui::TestAppContext) {
+ let db = cx.update(|cx| workspace::WorkspaceDb::global(cx));
+ let workspace_id = db.next_id().await.unwrap();
+ let editor_db = cx.update(|cx| EditorDb::global(cx));
let serialized_editor = SerializedEditor {
abs_path: Some(PathBuf::from("testing.txt")),
@@ -425,11 +427,12 @@ mod tests {
mtime: None,
};
- DB.save_serialized_editor(1234, workspace_id, serialized_editor.clone())
+ editor_db
+ .save_serialized_editor(1234, workspace_id, serialized_editor.clone())
.await
.unwrap();
- let have = DB
+ let have = editor_db
.get_serialized_editor(1234, workspace_id)
.unwrap()
.unwrap();
@@ -443,11 +446,12 @@ mod tests {
mtime: None,
};
- DB.save_serialized_editor(1234, workspace_id, serialized_editor.clone())
+ editor_db
+ .save_serialized_editor(1234, workspace_id, serialized_editor.clone())
.await
.unwrap();
- let have = DB
+ let have = editor_db
.get_serialized_editor(1234, workspace_id)
.unwrap()
.unwrap();
@@ -461,11 +465,12 @@ mod tests {
mtime: None,
};
- DB.save_serialized_editor(1234, workspace_id, serialized_editor.clone())
+ editor_db
+ .save_serialized_editor(1234, workspace_id, serialized_editor.clone())
.await
.unwrap();
- let have = DB
+ let have = editor_db
.get_serialized_editor(1234, workspace_id)
.unwrap()
.unwrap();
@@ -479,11 +484,12 @@ mod tests {
mtime: Some(MTime::from_seconds_and_nanos(100, 42)),
};
- DB.save_serialized_editor(1234, workspace_id, serialized_editor.clone())
+ editor_db
+ .save_serialized_editor(1234, workspace_id, serialized_editor.clone())
.await
.unwrap();
- let have = DB
+ let have = editor_db
.get_serialized_editor(1234, workspace_id)
.unwrap()
.unwrap();
@@ -499,8 +505,10 @@ mod tests {
// The search uses contains_str_at() to find fingerprints in the buffer.
#[gpui::test]
- async fn test_save_and_get_file_folds() {
- let workspace_id = workspace::WORKSPACE_DB.next_id().await.unwrap();
+ async fn test_save_and_get_file_folds(cx: &mut gpui::TestAppContext) {
+ let db = cx.update(|cx| workspace::WorkspaceDb::global(cx));
+ let workspace_id = db.next_id().await.unwrap();
+ let editor_db = cx.update(|cx| EditorDb::global(cx));
// file_folds table uses path as key (no FK to editors table)
let file_path: Arc<Path> = Arc::from(Path::new("/tmp/test_file_folds.rs"));
@@ -520,12 +528,13 @@ mod tests {
"} // end Foo".to_string(),
),
];
- DB.save_file_folds(workspace_id, file_path.clone(), folds.clone())
+ editor_db
+ .save_file_folds(workspace_id, file_path.clone(), folds.clone())
.await
.unwrap();
// Retrieve and verify fingerprints are preserved
- let retrieved = DB.get_file_folds(workspace_id, &file_path).unwrap();
+ let retrieved = editor_db.get_file_folds(workspace_id, &file_path).unwrap();
assert_eq!(retrieved.len(), 2);
assert_eq!(
retrieved[0],
@@ -553,11 +562,12 @@ mod tests {
"impl Bar {".to_string(),
"} // end impl".to_string(),
)];
- DB.save_file_folds(workspace_id, file_path.clone(), new_folds)
+ editor_db
+ .save_file_folds(workspace_id, file_path.clone(), new_folds)
.await
.unwrap();
- let retrieved = DB.get_file_folds(workspace_id, &file_path).unwrap();
+ let retrieved = editor_db.get_file_folds(workspace_id, &file_path).unwrap();
assert_eq!(retrieved.len(), 1);
assert_eq!(
retrieved[0],
@@ -570,10 +580,11 @@ mod tests {
);
// Test delete
- DB.delete_file_folds(workspace_id, file_path.clone())
+ editor_db
+ .delete_file_folds(workspace_id, file_path.clone())
.await
.unwrap();
- let retrieved = DB.get_file_folds(workspace_id, &file_path).unwrap();
+ let retrieved = editor_db.get_file_folds(workspace_id, &file_path).unwrap();
assert!(retrieved.is_empty());
// Test multiple files don't interfere
@@ -582,15 +593,21 @@ mod tests {
let folds_a = vec![(10, 20, "a_start".to_string(), "a_end".to_string())];
let folds_b = vec![(30, 40, "b_start".to_string(), "b_end".to_string())];
- DB.save_file_folds(workspace_id, file_path_a.clone(), folds_a)
+ editor_db
+ .save_file_folds(workspace_id, file_path_a.clone(), folds_a)
.await
.unwrap();
- DB.save_file_folds(workspace_id, file_path_b.clone(), folds_b)
+ editor_db
+ .save_file_folds(workspace_id, file_path_b.clone(), folds_b)
.await
.unwrap();
- let retrieved_a = DB.get_file_folds(workspace_id, &file_path_a).unwrap();
- let retrieved_b = DB.get_file_folds(workspace_id, &file_path_b).unwrap();
+ let retrieved_a = editor_db
+ .get_file_folds(workspace_id, &file_path_a)
+ .unwrap();
+ let retrieved_b = editor_db
+ .get_file_folds(workspace_id, &file_path_b)
+ .unwrap();
assert_eq!(retrieved_a.len(), 1);
assert_eq!(retrieved_b.len(), 1);
@@ -8,7 +8,7 @@ use crate::{
InlayHintRefreshReason, MultiBufferSnapshot, RowExt, ToPoint,
display_map::{DisplaySnapshot, ToDisplayPoint},
hover_popover::hide_hover,
- persistence::DB,
+ persistence::EditorDb,
};
pub use autoscroll::{Autoscroll, AutoscrollStrategy};
use core::fmt::Debug;
@@ -467,12 +467,13 @@ impl ScrollManager {
let item_id = cx.entity().entity_id().as_u64() as ItemId;
let executor = cx.background_executor().clone();
+ let db = EditorDb::global(cx);
self._save_scroll_position_task = cx.background_executor().spawn(async move {
executor.timer(Duration::from_millis(10)).await;
log::debug!(
"Saving scroll position for item {item_id:?} in workspace {workspace_id:?}"
);
- DB.save_scroll_position(
+ db.save_scroll_position(
item_id,
workspace_id,
top_row,
@@ -937,7 +938,7 @@ impl Editor {
window: &mut Window,
cx: &mut Context<Editor>,
) {
- let scroll_position = DB.get_scroll_position(item_id, workspace_id);
+ let scroll_position = EditorDb::global(cx).get_scroll_position(item_id, workspace_id);
if let Ok(Some((top_row, x, y))) = scroll_position {
let top_anchor = self
.buffer()
@@ -7,7 +7,7 @@ use std::{
use collections::HashMap;
use gpui::Pixels;
use itertools::Itertools as _;
-use language::{Bias, Point, Selection, SelectionGoal};
+use language::{Bias, Point, PointUtf16, Selection, SelectionGoal};
use multi_buffer::{MultiBufferDimension, MultiBufferOffset};
use util::post_inc;
@@ -408,11 +408,11 @@ impl SelectionsCollection {
}
/// Attempts to build a selection in the provided buffer row using the
- /// same buffer column range as specified.
+ /// same UTF-16 column range as specified.
/// Returns `None` if the range is not empty but it starts past the line's
/// length, meaning that the line isn't long enough to be contained within
/// part of the provided range.
- pub fn build_columnar_selection_from_buffer_columns(
+ fn build_columnar_selection_from_utf16_columns(
&mut self,
display_map: &DisplaySnapshot,
buffer_row: u32,
@@ -420,23 +420,22 @@ impl SelectionsCollection {
reversed: bool,
text_layout_details: &TextLayoutDetails,
) -> Option<Selection<Point>> {
+ let snapshot = display_map.buffer_snapshot();
let is_empty = positions.start == positions.end;
- let line_len = display_map
- .buffer_snapshot()
- .line_len(multi_buffer::MultiBufferRow(buffer_row));
+ let line_len_utf16 = snapshot.line_len_utf16(multi_buffer::MultiBufferRow(buffer_row));
let (start, end) = if is_empty {
- let column = std::cmp::min(positions.start, line_len);
- let point = Point::new(buffer_row, column);
+ let column = std::cmp::min(positions.start, line_len_utf16);
+ let point = snapshot.point_utf16_to_point(PointUtf16::new(buffer_row, column));
(point, point)
} else {
- if positions.start >= line_len {
+ if positions.start >= line_len_utf16 {
return None;
}
- let start = Point::new(buffer_row, positions.start);
- let end_column = std::cmp::min(positions.end, line_len);
- let end = Point::new(buffer_row, end_column);
+ let start = snapshot.point_utf16_to_point(PointUtf16::new(buffer_row, positions.start));
+ let end_column = std::cmp::min(positions.end, line_len_utf16);
+ let end = snapshot.point_utf16_to_point(PointUtf16::new(buffer_row, end_column));
(start, end)
};
@@ -510,7 +509,7 @@ impl SelectionsCollection {
row = new_row.row();
let buffer_row = new_row.to_point(display_map).row;
- if let Some(selection) = self.build_columnar_selection_from_buffer_columns(
+ if let Some(selection) = self.build_columnar_selection_from_utf16_columns(
display_map,
buffer_row,
goal_columns,
@@ -328,6 +328,9 @@ impl ExampleContext {
"{}Bug: Tool confirmation should not be required in eval",
log_prefix
),
+ ThreadEvent::Plan(plan) => {
+ println!("{log_prefix} Got plan: {plan:?}");
+ }
ThreadEvent::SubagentSpawned(session) => {
println!("{log_prefix} Got subagent spawn: {session:?}");
}
@@ -11,7 +11,6 @@ use std::sync::Arc;
use ::lsp::LanguageServerName;
use anyhow::{Context as _, Result, bail};
use async_trait::async_trait;
-use fs::normalize_path;
use gpui::{App, Task};
use language::LanguageName;
use semver::Version;
@@ -57,7 +56,7 @@ pub trait Extension: Send + Sync + 'static {
/// Returns a path relative to this extension's working directory.
fn path_from_extension(&self, path: &Path) -> PathBuf {
- normalize_path(&self.work_dir().join(path))
+ util::normalize_path(&self.work_dir().join(path))
}
async fn language_server_command(
@@ -1,12 +1,13 @@
use std::collections::HashMap;
use std::sync::{Arc, OnceLock};
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
use editor::Editor;
use extension_host::ExtensionStore;
use gpui::{AppContext as _, Context, Entity, SharedString, Window};
use language::Buffer;
use ui::prelude::*;
+use util::ResultExt;
use util::rel_path::RelPath;
use workspace::notifications::simple_message_notification::MessageNotification;
use workspace::{Workspace, notifications::NotificationId};
@@ -147,7 +148,8 @@ pub(crate) fn suggest(buffer: Entity<Buffer>, window: &mut Window, cx: &mut Cont
};
let key = language_extension_key(&extension_id);
- let Ok(None) = KEY_VALUE_STORE.read_kvp(&key) else {
+ let kvp = KeyValueStore::global(cx);
+ let Ok(None) = kvp.read_kvp(&key) else {
return;
};
@@ -193,9 +195,11 @@ pub(crate) fn suggest(buffer: Entity<Buffer>, window: &mut Window, cx: &mut Cont
.secondary_icon_color(Color::Error)
.secondary_on_click(move |_window, cx| {
let key = language_extension_key(&extension_id);
- db::write_and_log(cx, move || {
- KEY_VALUE_STORE.write_kvp(key, "dismissed".to_string())
- });
+ let kvp = KeyValueStore::global(cx);
+ cx.background_spawn(async move {
+ kvp.write_kvp(key, "dismissed".to_string()).await.log_err()
+ })
+ .detach();
})
})
});
@@ -62,3 +62,23 @@ impl FeatureFlag for StreamingEditFileToolFeatureFlag {
true
}
}
+
+pub struct UpdatePlanToolFeatureFlag;
+
+impl FeatureFlag for UpdatePlanToolFeatureFlag {
+ const NAME: &'static str = "update-plan-tool";
+
+ fn enabled_for_staff() -> bool {
+ true
+ }
+}
+
+pub struct ProjectPanelUndoRedoFeatureFlag;
+
+impl FeatureFlag for ProjectPanelUndoRedoFeatureFlag {
+ const NAME: &'static str = "project-panel-undo-redo";
+
+ fn enabled_for_staff() -> bool {
+ false
+ }
+}
@@ -392,6 +392,8 @@ impl GitRepository for FakeGitRepository {
.map(|branch_name| {
let ref_name = if branch_name.starts_with("refs/") {
branch_name.into()
+ } else if branch_name.contains('/') {
+ format!("refs/remotes/{branch_name}").into()
} else {
format!("refs/heads/{branch_name}").into()
};
@@ -425,7 +427,7 @@ impl GitRepository for FakeGitRepository {
.unwrap_or_else(|| "refs/heads/main".to_string());
let main_worktree = Worktree {
path: work_dir,
- ref_name: branch_ref.into(),
+ ref_name: Some(branch_ref.into()),
sha: head_sha.into(),
};
let mut all = vec![main_worktree];
@@ -436,15 +438,14 @@ impl GitRepository for FakeGitRepository {
fn create_worktree(
&self,
- name: String,
- directory: PathBuf,
+ branch_name: String,
+ path: PathBuf,
from_commit: Option<String>,
) -> BoxFuture<'_, Result<()>> {
let fs = self.fs.clone();
let executor = self.executor.clone();
let dot_git_path = self.dot_git_path.clone();
async move {
- let path = directory.join(&name);
executor.simulate_random_delay().await;
// Check for simulated error before any side effects
fs.with_git_state(&dot_git_path, false, |state| {
@@ -459,18 +460,18 @@ impl GitRepository for FakeGitRepository {
fs.with_git_state(&dot_git_path, true, {
let path = path.clone();
move |state| {
- if state.branches.contains(&name) {
- bail!("a branch named '{}' already exists", name);
+ if state.branches.contains(&branch_name) {
+ bail!("a branch named '{}' already exists", branch_name);
}
- let ref_name = format!("refs/heads/{name}");
+ let ref_name = format!("refs/heads/{branch_name}");
let sha = from_commit.unwrap_or_else(|| "fake-sha".to_string());
state.refs.insert(ref_name.clone(), sha.clone());
state.worktrees.push(Worktree {
path,
- ref_name: ref_name.into(),
+ ref_name: Some(ref_name.into()),
sha: sha.into(),
});
- state.branches.insert(name);
+ state.branches.insert(branch_name);
Ok::<(), anyhow::Error>(())
}
})??;
@@ -60,6 +60,8 @@ use git::{
repository::{InitialGraphCommitData, RepoPath, repo_path},
status::{FileStatus, StatusCode, TrackedStatus, UnmergedStatus},
};
+#[cfg(feature = "test-support")]
+use util::normalize_path;
#[cfg(feature = "test-support")]
use smol::io::AsyncReadExt;
@@ -644,9 +646,12 @@ impl Fs for RealFs {
code == libc::ENOSYS
|| code == libc::ENOTSUP
|| code == libc::EOPNOTSUPP
+ || code == libc::EINVAL
}) =>
{
// For case when filesystem or kernel does not support atomic no-overwrite rename.
+ // EINVAL is returned by FUSE-based filesystems (e.g. NTFS via ntfs-3g)
+ // that don't support RENAME_NOREPLACE.
true
}
Err(error) => return Err(error.into()),
@@ -2882,10 +2887,6 @@ impl Fs for FakeFs {
}
}
-pub fn normalize_path(path: &Path) -> PathBuf {
- util::normalize_path(path)
-}
-
pub async fn copy_recursive<'a>(
fs: &'a dyn Fs,
source: &'a Path,
@@ -6,139 +6,111 @@ use util::path;
#[gpui::test]
async fn test_fake_worktree_lifecycle(cx: &mut TestAppContext) {
- let worktree_dir_settings = &["../worktrees", ".git/zed-worktrees", "my-worktrees/"];
-
- for worktree_dir_setting in worktree_dir_settings {
- let fs = FakeFs::new(cx.executor());
- fs.insert_tree("/project", json!({".git": {}, "file.txt": "content"}))
- .await;
- let repo = fs
- .open_repo(Path::new("/project/.git"), None)
- .expect("should open fake repo");
-
- // Initially only the main worktree exists
- let worktrees = repo.worktrees().await.unwrap();
- assert_eq!(worktrees.len(), 1);
- assert_eq!(worktrees[0].path, PathBuf::from("/project"));
-
- let expected_dir = git::repository::resolve_worktree_directory(
- Path::new("/project"),
- worktree_dir_setting,
- );
-
- // Create a worktree
- repo.create_worktree(
- "feature-branch".to_string(),
- expected_dir.clone(),
- Some("abc123".to_string()),
- )
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree("/project", json!({".git": {}, "file.txt": "content"}))
+ .await;
+ let repo = fs
+ .open_repo(Path::new("/project/.git"), None)
+ .expect("should open fake repo");
+
+ // Initially only the main worktree exists
+ let worktrees = repo.worktrees().await.unwrap();
+ assert_eq!(worktrees.len(), 1);
+ assert_eq!(worktrees[0].path, PathBuf::from("/project"));
+
+ fs.create_dir("/my-worktrees".as_ref()).await.unwrap();
+ let worktrees_dir = Path::new("/my-worktrees");
+
+ // Create a worktree
+ let worktree_1_dir = worktrees_dir.join("feature-branch");
+ repo.create_worktree(
+ "feature-branch".to_string(),
+ worktree_1_dir.clone(),
+ Some("abc123".to_string()),
+ )
+ .await
+ .unwrap();
+
+ // List worktrees — should have main + one created
+ let worktrees = repo.worktrees().await.unwrap();
+ assert_eq!(worktrees.len(), 2);
+ assert_eq!(worktrees[0].path, PathBuf::from("/project"));
+ assert_eq!(worktrees[1].path, worktree_1_dir);
+ assert_eq!(
+ worktrees[1].ref_name,
+ Some("refs/heads/feature-branch".into())
+ );
+ assert_eq!(worktrees[1].sha.as_ref(), "abc123");
+
+ // Directory should exist in FakeFs after create
+ assert!(fs.is_dir(&worktrees_dir.join("feature-branch")).await);
+
+ // Create a second worktree (without explicit commit)
+ let worktree_2_dir = worktrees_dir.join("bugfix-branch");
+ repo.create_worktree("bugfix-branch".to_string(), worktree_2_dir.clone(), None)
.await
.unwrap();
- // List worktrees — should have main + one created
- let worktrees = repo.worktrees().await.unwrap();
- assert_eq!(worktrees.len(), 2);
- assert_eq!(worktrees[0].path, PathBuf::from("/project"));
- assert_eq!(
- worktrees[1].path,
- expected_dir.join("feature-branch"),
- "failed for worktree_directory setting: {worktree_dir_setting:?}"
- );
- assert_eq!(worktrees[1].ref_name.as_ref(), "refs/heads/feature-branch");
- assert_eq!(worktrees[1].sha.as_ref(), "abc123");
-
- // Directory should exist in FakeFs after create
- assert!(
- fs.is_dir(&expected_dir.join("feature-branch")).await,
- "worktree directory should be created in FakeFs for setting {worktree_dir_setting:?}"
- );
-
- // Create a second worktree (without explicit commit)
- repo.create_worktree("bugfix-branch".to_string(), expected_dir.clone(), None)
- .await
- .unwrap();
-
- let worktrees = repo.worktrees().await.unwrap();
- assert_eq!(worktrees.len(), 3);
- assert!(
- fs.is_dir(&expected_dir.join("bugfix-branch")).await,
- "second worktree directory should be created in FakeFs for setting {worktree_dir_setting:?}"
- );
-
- // Rename the first worktree
- repo.rename_worktree(
- expected_dir.join("feature-branch"),
- expected_dir.join("renamed-branch"),
- )
+ let worktrees = repo.worktrees().await.unwrap();
+ assert_eq!(worktrees.len(), 3);
+ assert!(fs.is_dir(&worktree_2_dir).await);
+
+ // Rename the first worktree
+ repo.rename_worktree(worktree_1_dir, worktrees_dir.join("renamed-branch"))
.await
.unwrap();
- let worktrees = repo.worktrees().await.unwrap();
- assert_eq!(worktrees.len(), 3);
- assert!(
- worktrees
- .iter()
- .any(|w| w.path == expected_dir.join("renamed-branch")),
- "renamed worktree should exist at new path for setting {worktree_dir_setting:?}"
- );
- assert!(
- worktrees
- .iter()
- .all(|w| w.path != expected_dir.join("feature-branch")),
- "old path should no longer exist for setting {worktree_dir_setting:?}"
- );
-
- // Directory should be moved in FakeFs after rename
- assert!(
- !fs.is_dir(&expected_dir.join("feature-branch")).await,
- "old worktree directory should not exist after rename for setting {worktree_dir_setting:?}"
- );
- assert!(
- fs.is_dir(&expected_dir.join("renamed-branch")).await,
- "new worktree directory should exist after rename for setting {worktree_dir_setting:?}"
- );
-
- // Rename a nonexistent worktree should fail
- let result = repo
- .rename_worktree(PathBuf::from("/nonexistent"), PathBuf::from("/somewhere"))
- .await;
- assert!(result.is_err());
-
- // Remove a worktree
- repo.remove_worktree(expected_dir.join("renamed-branch"), false)
- .await
- .unwrap();
-
- let worktrees = repo.worktrees().await.unwrap();
- assert_eq!(worktrees.len(), 2);
- assert_eq!(worktrees[0].path, PathBuf::from("/project"));
- assert_eq!(worktrees[1].path, expected_dir.join("bugfix-branch"));
-
- // Directory should be removed from FakeFs after remove
- assert!(
- !fs.is_dir(&expected_dir.join("renamed-branch")).await,
- "worktree directory should be removed from FakeFs for setting {worktree_dir_setting:?}"
- );
-
- // Remove a nonexistent worktree should fail
- let result = repo
- .remove_worktree(PathBuf::from("/nonexistent"), false)
- .await;
- assert!(result.is_err());
-
- // Remove the last worktree
- repo.remove_worktree(expected_dir.join("bugfix-branch"), false)
- .await
- .unwrap();
-
- let worktrees = repo.worktrees().await.unwrap();
- assert_eq!(worktrees.len(), 1);
- assert_eq!(worktrees[0].path, PathBuf::from("/project"));
- assert!(
- !fs.is_dir(&expected_dir.join("bugfix-branch")).await,
- "last worktree directory should be removed from FakeFs for setting {worktree_dir_setting:?}"
- );
- }
+ let worktrees = repo.worktrees().await.unwrap();
+ assert_eq!(worktrees.len(), 3);
+ assert!(
+ worktrees
+ .iter()
+ .any(|w| w.path == worktrees_dir.join("renamed-branch")),
+ );
+ assert!(
+ worktrees
+ .iter()
+ .all(|w| w.path != worktrees_dir.join("feature-branch")),
+ );
+
+ // Directory should be moved in FakeFs after rename
+ assert!(!fs.is_dir(&worktrees_dir.join("feature-branch")).await);
+ assert!(fs.is_dir(&worktrees_dir.join("renamed-branch")).await);
+
+ // Rename a nonexistent worktree should fail
+ let result = repo
+ .rename_worktree(PathBuf::from("/nonexistent"), PathBuf::from("/somewhere"))
+ .await;
+ assert!(result.is_err());
+
+ // Remove a worktree
+ repo.remove_worktree(worktrees_dir.join("renamed-branch"), false)
+ .await
+ .unwrap();
+
+ let worktrees = repo.worktrees().await.unwrap();
+ assert_eq!(worktrees.len(), 2);
+ assert_eq!(worktrees[0].path, PathBuf::from("/project"));
+ assert_eq!(worktrees[1].path, worktree_2_dir);
+
+ // Directory should be removed from FakeFs after remove
+ assert!(!fs.is_dir(&worktrees_dir.join("renamed-branch")).await);
+
+ // Remove a nonexistent worktree should fail
+ let result = repo
+ .remove_worktree(PathBuf::from("/nonexistent"), false)
+ .await;
+ assert!(result.is_err());
+
+ // Remove the last worktree
+ repo.remove_worktree(worktree_2_dir.clone(), false)
+ .await
+ .unwrap();
+
+ let worktrees = repo.worktrees().await.unwrap();
+ assert_eq!(worktrees.len(), 1);
+ assert_eq!(worktrees[0].path, PathBuf::from("/project"));
+ assert!(!fs.is_dir(&worktree_2_dir).await);
}
#[gpui::test]
@@ -36,7 +36,7 @@ use thiserror::Error;
use util::command::{Stdio, new_command};
use util::paths::PathStyle;
use util::rel_path::RelPath;
-use util::{ResultExt, normalize_path, paths};
+use util::{ResultExt, paths};
use uuid::Uuid;
pub use askpass::{AskPassDelegate, AskPassResult, AskPassSession};
@@ -76,97 +76,6 @@ pub fn original_repo_path_from_common_dir(common_dir: &Path) -> PathBuf {
}
}
-/// Resolves the configured worktree directory to an absolute path.
-///
-/// `worktree_directory_setting` is the raw string from the user setting
-/// (e.g. `"../worktrees"`, `".git/zed-worktrees"`, `"my-worktrees/"`).
-/// Trailing slashes are stripped. The path is resolved relative to
-/// `working_directory` (the repository's working directory root).
-///
-/// When the resolved directory falls outside the working directory
-/// (e.g. `"../worktrees"`), the repository's directory name is
-/// automatically appended so that sibling repos don't collide.
-/// For example, with working directory `~/code/zed` and setting
-/// `"../worktrees"`, this returns `~/code/worktrees/zed`.
-///
-/// When the resolved directory is inside the working directory
-/// (e.g. `".git/zed-worktrees"`), no extra component is added
-/// because the path is already project-scoped.
-pub fn resolve_worktree_directory(
- working_directory: &Path,
- worktree_directory_setting: &str,
-) -> PathBuf {
- let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
- let joined = working_directory.join(trimmed);
- let resolved = normalize_path(&joined);
-
- if resolved.starts_with(working_directory) {
- resolved
- } else if let Some(repo_dir_name) = working_directory.file_name() {
- resolved.join(repo_dir_name)
- } else {
- resolved
- }
-}
-
-/// Validates that the resolved worktree directory is acceptable:
-/// - The setting must not be an absolute path.
-/// - The resolved path must be either a subdirectory of the working
-/// directory or a subdirectory of its parent (i.e., a sibling).
-///
-/// Returns `Ok(resolved_path)` or an error with a user-facing message.
-pub fn validate_worktree_directory(
- working_directory: &Path,
- worktree_directory_setting: &str,
-) -> Result<PathBuf> {
- // Check the original setting before trimming, since a path like "///"
- // is absolute but becomes "" after stripping trailing separators.
- // Also check for leading `/` or `\` explicitly, because on Windows
- // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
- // would slip through even though it's clearly not a relative path.
- if Path::new(worktree_directory_setting).is_absolute()
- || worktree_directory_setting.starts_with('/')
- || worktree_directory_setting.starts_with('\\')
- {
- anyhow::bail!(
- "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
- );
- }
-
- if worktree_directory_setting.is_empty() {
- anyhow::bail!("git.worktree_directory must not be empty");
- }
-
- let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
- if trimmed == ".." {
- anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
- }
-
- let resolved = resolve_worktree_directory(working_directory, worktree_directory_setting);
-
- let parent = working_directory.parent().unwrap_or(working_directory);
-
- if !resolved.starts_with(parent) {
- anyhow::bail!(
- "git.worktree_directory resolved to {resolved:?}, which is outside \
- the project root and its parent directory. It must resolve to a \
- subdirectory of {working_directory:?} or a sibling of it."
- );
- }
-
- Ok(resolved)
-}
-
-/// Returns the full absolute path for a specific branch's worktree
-/// given the resolved worktree directory.
-pub fn worktree_path_for_branch(
- working_directory: &Path,
- worktree_directory_setting: &str,
- branch: &str,
-) -> PathBuf {
- resolve_worktree_directory(working_directory, worktree_directory_setting).join(branch)
-}
-
/// Commit data needed for the git graph visualization.
#[derive(Debug, Clone)]
pub struct GraphCommitData {
@@ -303,18 +212,25 @@ impl Branch {
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub struct Worktree {
pub path: PathBuf,
- pub ref_name: SharedString,
+ pub ref_name: Option<SharedString>,
// todo(git_worktree) This type should be a Oid
pub sha: SharedString,
}
impl Worktree {
- pub fn branch(&self) -> &str {
- self.ref_name
- .as_ref()
- .strip_prefix("refs/heads/")
- .or_else(|| self.ref_name.as_ref().strip_prefix("refs/remotes/"))
- .unwrap_or(self.ref_name.as_ref())
+ /// Returns a display name for the worktree, suitable for use in the UI.
+ ///
+ /// If the worktree is attached to a branch, returns the branch name.
+ /// Otherwise, returns the short SHA of the worktree's HEAD commit.
+ pub fn display_name(&self) -> &str {
+ match self.ref_name {
+ Some(ref ref_name) => ref_name
+ .strip_prefix("refs/heads/")
+ .or_else(|| ref_name.strip_prefix("refs/remotes/"))
+ .unwrap_or(ref_name),
+ // Detached HEAD — show the short SHA as a fallback.
+ None => &self.sha[..self.sha.len().min(SHORT_SHA_LENGTH)],
+ }
}
}
@@ -342,12 +258,10 @@ pub fn parse_worktrees_from_str<T: AsRef<str>>(raw_worktrees: T) -> Vec<Worktree
// Ignore other lines: detached, bare, locked, prunable, etc.
}
- // todo(git_worktree) We should add a test for detach head state
- // a detach head will have ref_name as none so we would skip it
- if let (Some(path), Some(sha), Some(ref_name)) = (path, sha, ref_name) {
+ if let (Some(path), Some(sha)) = (path, sha) {
worktrees.push(Worktree {
path: PathBuf::from(path),
- ref_name: ref_name.into(),
+ ref_name: ref_name.map(Into::into),
sha: sha.into(),
})
}
@@ -769,8 +683,8 @@ pub trait GitRepository: Send + Sync {
fn create_worktree(
&self,
- name: String,
- directory: PathBuf,
+ branch_name: String,
+ path: PathBuf,
from_commit: Option<String>,
) -> BoxFuture<'_, Result<()>>;
@@ -1034,6 +948,7 @@ impl RealGitRepository {
self.any_git_binary_path.clone(),
self.working_directory()
.with_context(|| "Can't run git commands without a working directory")?,
+ self.path(),
self.executor.clone(),
self.is_trusted(),
))
@@ -1088,6 +1003,7 @@ pub async fn get_git_committer(cx: &AsyncApp) -> GitCommitter {
let git = GitBinary::new(
git_binary_path.unwrap_or(PathBuf::from("git")),
paths::home_dir().clone(),
+ paths::home_dir().join(".git"),
cx.background_executor().clone(),
true,
);
@@ -1712,20 +1628,19 @@ impl GitRepository for RealGitRepository {
fn create_worktree(
&self,
- name: String,
- directory: PathBuf,
+ branch_name: String,
+ path: PathBuf,
from_commit: Option<String>,
) -> BoxFuture<'_, Result<()>> {
let git_binary = self.git_binary();
- let final_path = directory.join(&name);
let mut args = vec![
OsString::from("--no-optional-locks"),
OsString::from("worktree"),
OsString::from("add"),
OsString::from("-b"),
- OsString::from(name.as_str()),
+ OsString::from(branch_name.as_str()),
OsString::from("--"),
- OsString::from(final_path.as_os_str()),
+ OsString::from(path.as_os_str()),
];
if let Some(from_commit) = from_commit {
args.push(OsString::from(from_commit));
@@ -1735,7 +1650,7 @@ impl GitRepository for RealGitRepository {
self.executor
.spawn(async move {
- std::fs::create_dir_all(final_path.parent().unwrap_or(&final_path))?;
+ std::fs::create_dir_all(path.parent().unwrap_or(&path))?;
let git = git_binary?;
let output = git.build_command(&args).output().await?;
if output.status.success() {
@@ -2246,6 +2161,7 @@ impl GitRepository for RealGitRepository {
cx: AsyncApp,
) -> BoxFuture<'_, Result<RemoteCommandOutput>> {
let working_directory = self.working_directory();
+ let git_directory = self.path();
let executor = cx.background_executor().clone();
let git_binary_path = self.system_git_binary_path.clone();
let is_trusted = self.is_trusted();
@@ -2257,6 +2173,7 @@ impl GitRepository for RealGitRepository {
let git = GitBinary::new(
git_binary_path,
working_directory,
+ git_directory,
executor.clone(),
is_trusted,
);
@@ -2288,6 +2205,7 @@ impl GitRepository for RealGitRepository {
cx: AsyncApp,
) -> BoxFuture<'_, Result<RemoteCommandOutput>> {
let working_directory = self.working_directory();
+ let git_directory = self.path();
let executor = cx.background_executor().clone();
let git_binary_path = self.system_git_binary_path.clone();
let is_trusted = self.is_trusted();
@@ -2299,6 +2217,7 @@ impl GitRepository for RealGitRepository {
let git = GitBinary::new(
git_binary_path,
working_directory,
+ git_directory,
executor.clone(),
is_trusted,
);
@@ -2328,6 +2247,7 @@ impl GitRepository for RealGitRepository {
cx: AsyncApp,
) -> BoxFuture<'_, Result<RemoteCommandOutput>> {
let working_directory = self.working_directory();
+ let git_directory = self.path();
let remote_name = format!("{}", fetch_options);
let git_binary_path = self.system_git_binary_path.clone();
let executor = cx.background_executor().clone();
@@ -2340,6 +2260,7 @@ impl GitRepository for RealGitRepository {
let git = GitBinary::new(
git_binary_path,
working_directory,
+ git_directory,
executor.clone(),
is_trusted,
);
@@ -2992,6 +2913,7 @@ async fn exclude_files(git: &GitBinary) -> Result<GitExcludeOverride> {
pub(crate) struct GitBinary {
git_binary_path: PathBuf,
working_directory: PathBuf,
+ git_directory: PathBuf,
executor: BackgroundExecutor,
index_file_path: Option<PathBuf>,
envs: HashMap<String, String>,
@@ -3002,12 +2924,14 @@ impl GitBinary {
pub(crate) fn new(
git_binary_path: PathBuf,
working_directory: PathBuf,
+ git_directory: PathBuf,
executor: BackgroundExecutor,
is_trusted: bool,
) -> Self {
Self {
git_binary_path,
working_directory,
+ git_directory,
executor,
index_file_path: None,
envs: HashMap::default(),
@@ -3053,12 +2977,9 @@ impl GitBinary {
// Copy the default index file so that Git doesn't have to rebuild the
// whole index from scratch. This might fail if this is an empty repository.
- smol::fs::copy(
- self.working_directory.join(".git").join("index"),
- &index_file_path,
- )
- .await
- .ok();
+ smol::fs::copy(self.git_directory.join("index"), &index_file_path)
+ .await
+ .ok();
self.index_file_path = Some(index_file_path.clone());
let result = f(self).await;
@@ -3072,19 +2993,13 @@ impl GitBinary {
}
pub async fn with_exclude_overrides(&self) -> Result<GitExcludeOverride> {
- let path = self
- .working_directory
- .join(".git")
- .join("info")
- .join("exclude");
+ let path = self.git_directory.join("info").join("exclude");
GitExcludeOverride::new(path).await
}
fn path_for_index_id(&self, id: Uuid) -> PathBuf {
- self.working_directory
- .join(".git")
- .join(format!("index-{}.tmp", id))
+ self.git_directory.join(format!("index-{}.tmp", id))
}
pub async fn run<S>(&self, args: &[S]) -> Result<String>
@@ -3390,6 +3305,8 @@ fn checkpoint_author_envs() -> HashMap<String, String> {
#[cfg(test)]
mod tests {
+ use std::fs;
+
use super::*;
use gpui::TestAppContext;
@@ -3407,6 +3324,7 @@ mod tests {
let git = GitBinary::new(
PathBuf::from("git"),
dir.path().to_path_buf(),
+ dir.path().join(".git"),
cx.executor(),
false,
);
@@ -3420,6 +3338,7 @@ mod tests {
let git = GitBinary::new(
PathBuf::from("git"),
dir.path().to_path_buf(),
+ dir.path().join(".git"),
cx.executor(),
false,
);
@@ -3439,6 +3358,7 @@ mod tests {
let git = GitBinary::new(
PathBuf::from("git"),
dir.path().to_path_buf(),
+ dir.path().join(".git"),
cx.executor(),
false,
);
@@ -3464,6 +3384,7 @@ mod tests {
let git = GitBinary::new(
PathBuf::from("git"),
dir.path().to_path_buf(),
+ dir.path().join(".git"),
cx.executor(),
true,
);
@@ -3482,6 +3403,7 @@ mod tests {
let git = GitBinary::new(
PathBuf::from("git"),
dir.path().to_path_buf(),
+ dir.path().join(".git"),
cx.executor(),
true,
);
@@ -3496,6 +3418,27 @@ mod tests {
);
}
+ #[gpui::test]
+ async fn test_path_for_index_id_uses_real_git_directory(cx: &mut TestAppContext) {
+ cx.executor().allow_parking();
+ let working_directory = PathBuf::from("/code/worktree");
+ let git_directory = PathBuf::from("/code/repo/.git/modules/worktree");
+ let git = GitBinary::new(
+ PathBuf::from("git"),
+ working_directory,
+ git_directory.clone(),
+ cx.executor(),
+ false,
+ );
+
+ let path = git.path_for_index_id(Uuid::nil());
+
+ assert_eq!(
+ path,
+ git_directory.join(format!("index-{}.tmp", Uuid::nil()))
+ );
+ }
+
#[gpui::test]
async fn test_checkpoint_basic(cx: &mut TestAppContext) {
disable_git_global_config();
@@ -3855,7 +3798,7 @@ mod tests {
assert_eq!(result.len(), 1);
assert_eq!(result[0].path, PathBuf::from("/home/user/project"));
assert_eq!(result[0].sha.as_ref(), "abc123def");
- assert_eq!(result[0].ref_name.as_ref(), "refs/heads/main");
+ assert_eq!(result[0].ref_name, Some("refs/heads/main".into()));
// Multiple worktrees
let input = "worktree /home/user/project\nHEAD abc123\nbranch refs/heads/main\n\n\
@@ -3863,23 +3806,30 @@ mod tests {
let result = parse_worktrees_from_str(input);
assert_eq!(result.len(), 2);
assert_eq!(result[0].path, PathBuf::from("/home/user/project"));
- assert_eq!(result[0].ref_name.as_ref(), "refs/heads/main");
+ assert_eq!(result[0].ref_name, Some("refs/heads/main".into()));
assert_eq!(result[1].path, PathBuf::from("/home/user/project-wt"));
- assert_eq!(result[1].ref_name.as_ref(), "refs/heads/feature");
+ assert_eq!(result[1].ref_name, Some("refs/heads/feature".into()));
- // Detached HEAD entry (should be skipped since ref_name won't parse)
+ // Detached HEAD entry (included with ref_name: None)
let input = "worktree /home/user/project\nHEAD abc123\nbranch refs/heads/main\n\n\
worktree /home/user/detached\nHEAD def456\ndetached\n\n";
let result = parse_worktrees_from_str(input);
- assert_eq!(result.len(), 1);
+ assert_eq!(result.len(), 2);
assert_eq!(result[0].path, PathBuf::from("/home/user/project"));
+ assert_eq!(result[0].ref_name, Some("refs/heads/main".into()));
+ assert_eq!(result[1].path, PathBuf::from("/home/user/detached"));
+ assert_eq!(result[1].ref_name, None);
+ assert_eq!(result[1].sha.as_ref(), "def456");
- // Bare repo entry (should be skipped)
+ // Bare repo entry (included with ref_name: None)
let input = "worktree /home/user/bare.git\nHEAD abc123\nbare\n\n\
worktree /home/user/project\nHEAD def456\nbranch refs/heads/main\n\n";
let result = parse_worktrees_from_str(input);
- assert_eq!(result.len(), 1);
- assert_eq!(result[0].path, PathBuf::from("/home/user/project"));
+ assert_eq!(result.len(), 2);
+ assert_eq!(result[0].path, PathBuf::from("/home/user/bare.git"));
+ assert_eq!(result[0].ref_name, None);
+ assert_eq!(result[1].path, PathBuf::from("/home/user/project"));
+ assert_eq!(result[1].ref_name, Some("refs/heads/main".into()));
// Extra porcelain lines (locked, prunable) should be ignored
let input = "worktree /home/user/project\nHEAD abc123\nbranch refs/heads/main\n\n\
@@ -3888,11 +3838,14 @@ mod tests {
let result = parse_worktrees_from_str(input);
assert_eq!(result.len(), 3);
assert_eq!(result[0].path, PathBuf::from("/home/user/project"));
- assert_eq!(result[0].ref_name.as_ref(), "refs/heads/main");
+ assert_eq!(result[0].ref_name, Some("refs/heads/main".into()));
assert_eq!(result[1].path, PathBuf::from("/home/user/locked-wt"));
- assert_eq!(result[1].ref_name.as_ref(), "refs/heads/locked-branch");
+ assert_eq!(result[1].ref_name, Some("refs/heads/locked-branch".into()));
assert_eq!(result[2].path, PathBuf::from("/home/user/prunable-wt"));
- assert_eq!(result[2].ref_name.as_ref(), "refs/heads/prunable-branch");
+ assert_eq!(
+ result[2].ref_name,
+ Some("refs/heads/prunable-branch".into())
+ );
// Leading/trailing whitespace on lines should be tolerated
let input =
@@ -3901,7 +3854,7 @@ mod tests {
assert_eq!(result.len(), 1);
assert_eq!(result[0].path, PathBuf::from("/home/user/project"));
assert_eq!(result[0].sha.as_ref(), "abc123");
- assert_eq!(result[0].ref_name.as_ref(), "refs/heads/main");
+ assert_eq!(result[0].ref_name, Some("refs/heads/main".into()));
// Windows-style line endings should be handled
let input = "worktree /home/user/project\r\nHEAD abc123\r\nbranch refs/heads/main\r\n\r\n";
@@ -3909,89 +3862,79 @@ mod tests {
assert_eq!(result.len(), 1);
assert_eq!(result[0].path, PathBuf::from("/home/user/project"));
assert_eq!(result[0].sha.as_ref(), "abc123");
- assert_eq!(result[0].ref_name.as_ref(), "refs/heads/main");
+ assert_eq!(result[0].ref_name, Some("refs/heads/main".into()));
}
- const TEST_WORKTREE_DIRECTORIES: &[&str] =
- &["../worktrees", ".git/zed-worktrees", "my-worktrees/"];
-
#[gpui::test]
async fn test_create_and_list_worktrees(cx: &mut TestAppContext) {
disable_git_global_config();
cx.executor().allow_parking();
- for worktree_dir_setting in TEST_WORKTREE_DIRECTORIES {
- let repo_dir = tempfile::tempdir().unwrap();
- git2::Repository::init(repo_dir.path()).unwrap();
+ let temp_dir = tempfile::tempdir().unwrap();
+ let repo_dir = temp_dir.path().join("repo");
+ let worktrees_dir = temp_dir.path().join("worktrees");
- let repo = RealGitRepository::new(
- &repo_dir.path().join(".git"),
- None,
- Some("git".into()),
- cx.executor(),
- )
- .unwrap();
+ fs::create_dir_all(&repo_dir).unwrap();
+ fs::create_dir_all(&worktrees_dir).unwrap();
- // Create an initial commit (required for worktrees)
- smol::fs::write(repo_dir.path().join("file.txt"), "content")
- .await
- .unwrap();
- repo.stage_paths(vec![repo_path("file.txt")], Arc::new(HashMap::default()))
- .await
- .unwrap();
- repo.commit(
- "Initial commit".into(),
- None,
- CommitOptions::default(),
- AskPassDelegate::new(&mut cx.to_async(), |_, _, _| {}),
- Arc::new(checkpoint_author_envs()),
- )
- .await
- .unwrap();
+ git2::Repository::init(&repo_dir).unwrap();
- // List worktrees — should have just the main one
- let worktrees = repo.worktrees().await.unwrap();
- assert_eq!(worktrees.len(), 1);
- assert_eq!(
- worktrees[0].path.canonicalize().unwrap(),
- repo_dir.path().canonicalize().unwrap()
- );
+ let repo = RealGitRepository::new(
+ &repo_dir.join(".git"),
+ None,
+ Some("git".into()),
+ cx.executor(),
+ )
+ .unwrap();
- // Create a new worktree
- repo.create_worktree(
- "test-branch".to_string(),
- resolve_worktree_directory(repo_dir.path(), worktree_dir_setting),
- Some("HEAD".to_string()),
- )
+ // Create an initial commit (required for worktrees)
+ smol::fs::write(repo_dir.join("file.txt"), "content")
.await
.unwrap();
+ repo.stage_paths(vec![repo_path("file.txt")], Arc::new(HashMap::default()))
+ .await
+ .unwrap();
+ repo.commit(
+ "Initial commit".into(),
+ None,
+ CommitOptions::default(),
+ AskPassDelegate::new(&mut cx.to_async(), |_, _, _| {}),
+ Arc::new(checkpoint_author_envs()),
+ )
+ .await
+ .unwrap();
- // List worktrees — should have two
- let worktrees = repo.worktrees().await.unwrap();
- assert_eq!(worktrees.len(), 2);
-
- let expected_path =
- worktree_path_for_branch(repo_dir.path(), worktree_dir_setting, "test-branch");
- let new_worktree = worktrees
- .iter()
- .find(|w| w.branch() == "test-branch")
- .expect("should find worktree with test-branch");
- assert_eq!(
- new_worktree.path.canonicalize().unwrap(),
- expected_path.canonicalize().unwrap(),
- "failed for worktree_directory setting: {worktree_dir_setting:?}"
- );
+ // List worktrees — should have just the main one
+ let worktrees = repo.worktrees().await.unwrap();
+ assert_eq!(worktrees.len(), 1);
+ assert_eq!(
+ worktrees[0].path.canonicalize().unwrap(),
+ repo_dir.canonicalize().unwrap()
+ );
- // Clean up so the next iteration starts fresh
- repo.remove_worktree(expected_path, true).await.unwrap();
+ let worktree_path = worktrees_dir.join("some-worktree");
- // Clean up the worktree base directory if it was created outside repo_dir
- // (e.g. for the "../worktrees" setting, it won't be inside the TempDir)
- let resolved_dir = resolve_worktree_directory(repo_dir.path(), worktree_dir_setting);
- if !resolved_dir.starts_with(repo_dir.path()) {
- let _ = std::fs::remove_dir_all(&resolved_dir);
- }
- }
+ // Create a new worktree
+ repo.create_worktree(
+ "test-branch".to_string(),
+ worktree_path.clone(),
+ Some("HEAD".to_string()),
+ )
+ .await
+ .unwrap();
+
+ // List worktrees — should have two
+ let worktrees = repo.worktrees().await.unwrap();
+ assert_eq!(worktrees.len(), 2);
+
+ let new_worktree = worktrees
+ .iter()
+ .find(|w| w.display_name() == "test-branch")
+ .expect("should find worktree with test-branch");
+ assert_eq!(
+ new_worktree.path.canonicalize().unwrap(),
+ worktree_path.canonicalize().unwrap(),
+ );
}
#[gpui::test]
@@ -3999,147 +3942,92 @@ mod tests {
disable_git_global_config();
cx.executor().allow_parking();
- for worktree_dir_setting in TEST_WORKTREE_DIRECTORIES {
- let repo_dir = tempfile::tempdir().unwrap();
- git2::Repository::init(repo_dir.path()).unwrap();
+ let temp_dir = tempfile::tempdir().unwrap();
+ let repo_dir = temp_dir.path().join("repo");
+ let worktrees_dir = temp_dir.path().join("worktrees");
+ git2::Repository::init(&repo_dir).unwrap();
- let repo = RealGitRepository::new(
- &repo_dir.path().join(".git"),
- None,
- Some("git".into()),
- cx.executor(),
- )
- .unwrap();
+ let repo = RealGitRepository::new(
+ &repo_dir.join(".git"),
+ None,
+ Some("git".into()),
+ cx.executor(),
+ )
+ .unwrap();
- // Create an initial commit
- smol::fs::write(repo_dir.path().join("file.txt"), "content")
- .await
- .unwrap();
- repo.stage_paths(vec![repo_path("file.txt")], Arc::new(HashMap::default()))
- .await
- .unwrap();
- repo.commit(
- "Initial commit".into(),
- None,
- CommitOptions::default(),
- AskPassDelegate::new(&mut cx.to_async(), |_, _, _| {}),
- Arc::new(checkpoint_author_envs()),
- )
+ // Create an initial commit
+ smol::fs::write(repo_dir.join("file.txt"), "content")
.await
.unwrap();
-
- // Create a worktree
- repo.create_worktree(
- "to-remove".to_string(),
- resolve_worktree_directory(repo_dir.path(), worktree_dir_setting),
- Some("HEAD".to_string()),
- )
+ repo.stage_paths(vec![repo_path("file.txt")], Arc::new(HashMap::default()))
.await
.unwrap();
+ repo.commit(
+ "Initial commit".into(),
+ None,
+ CommitOptions::default(),
+ AskPassDelegate::new(&mut cx.to_async(), |_, _, _| {}),
+ Arc::new(checkpoint_author_envs()),
+ )
+ .await
+ .unwrap();
- let worktree_path =
- worktree_path_for_branch(repo_dir.path(), worktree_dir_setting, "to-remove");
- assert!(worktree_path.exists());
-
- // Remove the worktree
- repo.remove_worktree(worktree_path.clone(), false)
- .await
- .unwrap();
-
- // Verify it's gone from the list
- let worktrees = repo.worktrees().await.unwrap();
- assert_eq!(worktrees.len(), 1);
- assert!(
- worktrees.iter().all(|w| w.branch() != "to-remove"),
- "removed worktree should not appear in list"
- );
-
- // Verify the directory is removed
- assert!(!worktree_path.exists());
-
- // Clean up the worktree base directory if it was created outside repo_dir
- // (e.g. for the "../worktrees" setting, it won't be inside the TempDir)
- let resolved_dir = resolve_worktree_directory(repo_dir.path(), worktree_dir_setting);
- if !resolved_dir.starts_with(repo_dir.path()) {
- let _ = std::fs::remove_dir_all(&resolved_dir);
- }
- }
- }
+ // Create a worktree
+ let worktree_path = worktrees_dir.join("worktree-to-remove");
+ repo.create_worktree(
+ "to-remove".to_string(),
+ worktree_path.clone(),
+ Some("HEAD".to_string()),
+ )
+ .await
+ .unwrap();
- #[gpui::test]
- async fn test_remove_worktree_force(cx: &mut TestAppContext) {
- disable_git_global_config();
- cx.executor().allow_parking();
+ // Remove the worktree
+ repo.remove_worktree(worktree_path.clone(), false)
+ .await
+ .unwrap();
- for worktree_dir_setting in TEST_WORKTREE_DIRECTORIES {
- let repo_dir = tempfile::tempdir().unwrap();
- git2::Repository::init(repo_dir.path()).unwrap();
+ // Verify the directory is removed
+ let worktrees = repo.worktrees().await.unwrap();
+ assert_eq!(worktrees.len(), 1);
+ assert!(
+ worktrees.iter().all(|w| w.display_name() != "to-remove"),
+ "removed worktree should not appear in list"
+ );
+ assert!(!worktree_path.exists());
+
+ // Create a worktree
+ let worktree_path = worktrees_dir.join("dirty-wt");
+ repo.create_worktree(
+ "dirty-wt".to_string(),
+ worktree_path.clone(),
+ Some("HEAD".to_string()),
+ )
+ .await
+ .unwrap();
- let repo = RealGitRepository::new(
- &repo_dir.path().join(".git"),
- None,
- Some("git".into()),
- cx.executor(),
- )
- .unwrap();
+ assert!(worktree_path.exists());
- // Create an initial commit
- smol::fs::write(repo_dir.path().join("file.txt"), "content")
- .await
- .unwrap();
- repo.stage_paths(vec![repo_path("file.txt")], Arc::new(HashMap::default()))
- .await
- .unwrap();
- repo.commit(
- "Initial commit".into(),
- None,
- CommitOptions::default(),
- AskPassDelegate::new(&mut cx.to_async(), |_, _, _| {}),
- Arc::new(checkpoint_author_envs()),
- )
+ // Add uncommitted changes in the worktree
+ smol::fs::write(worktree_path.join("dirty-file.txt"), "uncommitted")
.await
.unwrap();
- // Create a worktree
- repo.create_worktree(
- "dirty-wt".to_string(),
- resolve_worktree_directory(repo_dir.path(), worktree_dir_setting),
- Some("HEAD".to_string()),
- )
+ // Non-force removal should fail with dirty worktree
+ let result = repo.remove_worktree(worktree_path.clone(), false).await;
+ assert!(
+ result.is_err(),
+ "non-force removal of dirty worktree should fail"
+ );
+
+ // Force removal should succeed
+ repo.remove_worktree(worktree_path.clone(), true)
.await
.unwrap();
- let worktree_path =
- worktree_path_for_branch(repo_dir.path(), worktree_dir_setting, "dirty-wt");
-
- // Add uncommitted changes in the worktree
- smol::fs::write(worktree_path.join("dirty-file.txt"), "uncommitted")
- .await
- .unwrap();
-
- // Non-force removal should fail with dirty worktree
- let result = repo.remove_worktree(worktree_path.clone(), false).await;
- assert!(
- result.is_err(),
- "non-force removal of dirty worktree should fail"
- );
-
- // Force removal should succeed
- repo.remove_worktree(worktree_path.clone(), true)
- .await
- .unwrap();
-
- let worktrees = repo.worktrees().await.unwrap();
- assert_eq!(worktrees.len(), 1);
- assert!(!worktree_path.exists());
-
- // Clean up the worktree base directory if it was created outside repo_dir
- // (e.g. for the "../worktrees" setting, it won't be inside the TempDir)
- let resolved_dir = resolve_worktree_directory(repo_dir.path(), worktree_dir_setting);
- if !resolved_dir.starts_with(repo_dir.path()) {
- let _ = std::fs::remove_dir_all(&resolved_dir);
- }
- }
+ let worktrees = repo.worktrees().await.unwrap();
+ assert_eq!(worktrees.len(), 1);
+ assert!(!worktree_path.exists());
}
#[gpui::test]
@@ -4147,141 +4035,69 @@ mod tests {
disable_git_global_config();
cx.executor().allow_parking();
- for worktree_dir_setting in TEST_WORKTREE_DIRECTORIES {
- let repo_dir = tempfile::tempdir().unwrap();
- git2::Repository::init(repo_dir.path()).unwrap();
+ let temp_dir = tempfile::tempdir().unwrap();
+ let repo_dir = temp_dir.path().join("repo");
+ let worktrees_dir = temp_dir.path().join("worktrees");
- let repo = RealGitRepository::new(
- &repo_dir.path().join(".git"),
- None,
- Some("git".into()),
- cx.executor(),
- )
- .unwrap();
+ git2::Repository::init(&repo_dir).unwrap();
- // Create an initial commit
- smol::fs::write(repo_dir.path().join("file.txt"), "content")
- .await
- .unwrap();
- repo.stage_paths(vec![repo_path("file.txt")], Arc::new(HashMap::default()))
- .await
- .unwrap();
- repo.commit(
- "Initial commit".into(),
- None,
- CommitOptions::default(),
- AskPassDelegate::new(&mut cx.to_async(), |_, _, _| {}),
- Arc::new(checkpoint_author_envs()),
- )
+ let repo = RealGitRepository::new(
+ &repo_dir.join(".git"),
+ None,
+ Some("git".into()),
+ cx.executor(),
+ )
+ .unwrap();
+
+ // Create an initial commit
+ smol::fs::write(repo_dir.join("file.txt"), "content")
.await
.unwrap();
-
- // Create a worktree
- repo.create_worktree(
- "old-name".to_string(),
- resolve_worktree_directory(repo_dir.path(), worktree_dir_setting),
- Some("HEAD".to_string()),
- )
+ repo.stage_paths(vec![repo_path("file.txt")], Arc::new(HashMap::default()))
.await
.unwrap();
+ repo.commit(
+ "Initial commit".into(),
+ None,
+ CommitOptions::default(),
+ AskPassDelegate::new(&mut cx.to_async(), |_, _, _| {}),
+ Arc::new(checkpoint_author_envs()),
+ )
+ .await
+ .unwrap();
- let old_path =
- worktree_path_for_branch(repo_dir.path(), worktree_dir_setting, "old-name");
- assert!(old_path.exists());
-
- // Move the worktree to a new path
- let new_path =
- resolve_worktree_directory(repo_dir.path(), worktree_dir_setting).join("new-name");
- repo.rename_worktree(old_path.clone(), new_path.clone())
- .await
- .unwrap();
-
- // Verify the old path is gone and new path exists
- assert!(!old_path.exists());
- assert!(new_path.exists());
-
- // Verify it shows up in worktree list at the new path
- let worktrees = repo.worktrees().await.unwrap();
- assert_eq!(worktrees.len(), 2);
- let moved_worktree = worktrees
- .iter()
- .find(|w| w.branch() == "old-name")
- .expect("should find worktree by branch name");
- assert_eq!(
- moved_worktree.path.canonicalize().unwrap(),
- new_path.canonicalize().unwrap()
- );
-
- // Clean up so the next iteration starts fresh
- repo.remove_worktree(new_path, true).await.unwrap();
-
- // Clean up the worktree base directory if it was created outside repo_dir
- // (e.g. for the "../worktrees" setting, it won't be inside the TempDir)
- let resolved_dir = resolve_worktree_directory(repo_dir.path(), worktree_dir_setting);
- if !resolved_dir.starts_with(repo_dir.path()) {
- let _ = std::fs::remove_dir_all(&resolved_dir);
- }
- }
- }
-
- #[test]
- fn test_resolve_worktree_directory() {
- let work_dir = Path::new("/code/my-project");
-
- // Sibling directory — outside project, so repo dir name is appended
- assert_eq!(
- resolve_worktree_directory(work_dir, "../worktrees"),
- PathBuf::from("/code/worktrees/my-project")
- );
-
- // Git subdir — inside project, no repo name appended
- assert_eq!(
- resolve_worktree_directory(work_dir, ".git/zed-worktrees"),
- PathBuf::from("/code/my-project/.git/zed-worktrees")
- );
-
- // Simple subdir — inside project, no repo name appended
- assert_eq!(
- resolve_worktree_directory(work_dir, "my-worktrees"),
- PathBuf::from("/code/my-project/my-worktrees")
- );
-
- // Trailing slash is stripped
- assert_eq!(
- resolve_worktree_directory(work_dir, "../worktrees/"),
- PathBuf::from("/code/worktrees/my-project")
- );
- assert_eq!(
- resolve_worktree_directory(work_dir, "my-worktrees/"),
- PathBuf::from("/code/my-project/my-worktrees")
- );
-
- // Multiple trailing slashes
- assert_eq!(
- resolve_worktree_directory(work_dir, "foo///"),
- PathBuf::from("/code/my-project/foo")
- );
+ // Create a worktree
+ let old_path = worktrees_dir.join("old-worktree-name");
+ repo.create_worktree(
+ "old-name".to_string(),
+ old_path.clone(),
+ Some("HEAD".to_string()),
+ )
+ .await
+ .unwrap();
- // Trailing backslashes (Windows-style)
- assert_eq!(
- resolve_worktree_directory(work_dir, "my-worktrees\\"),
- PathBuf::from("/code/my-project/my-worktrees")
- );
- assert_eq!(
- resolve_worktree_directory(work_dir, "foo\\/\\"),
- PathBuf::from("/code/my-project/foo")
- );
+ assert!(old_path.exists());
- // Empty string resolves to the working directory itself (inside)
- assert_eq!(
- resolve_worktree_directory(work_dir, ""),
- PathBuf::from("/code/my-project")
- );
+ // Move the worktree to a new path
+ let new_path = worktrees_dir.join("new-worktree-name");
+ repo.rename_worktree(old_path.clone(), new_path.clone())
+ .await
+ .unwrap();
- // Just ".." — outside project, repo dir name appended
+ // Verify the old path is gone and new path exists
+ assert!(!old_path.exists());
+ assert!(new_path.exists());
+
+ // Verify it shows up in worktree list at the new path
+ let worktrees = repo.worktrees().await.unwrap();
+ assert_eq!(worktrees.len(), 2);
+ let moved_worktree = worktrees
+ .iter()
+ .find(|w| w.display_name() == "old-name")
+ .expect("should find worktree by display name");
assert_eq!(
- resolve_worktree_directory(work_dir, ".."),
- PathBuf::from("/code/my-project")
+ moved_worktree.path.canonicalize().unwrap(),
+ new_path.canonicalize().unwrap()
);
}
@@ -2358,7 +2358,7 @@ impl SerializableItem for GitGraph {
alive_items,
workspace_id,
"git_graphs",
- &persistence::GIT_GRAPHS,
+ &persistence::GitGraphsDb::global(cx),
cx,
)
}
@@ -2371,7 +2371,8 @@ impl SerializableItem for GitGraph {
window: &mut Window,
cx: &mut App,
) -> Task<gpui::Result<Entity<Self>>> {
- if persistence::GIT_GRAPHS
+ let db = persistence::GitGraphsDb::global(cx);
+ if db
.get_git_graph(item_id, workspace_id)
.ok()
.is_some_and(|is_open| is_open)
@@ -2392,11 +2393,12 @@ impl SerializableItem for GitGraph {
cx: &mut Context<Self>,
) -> Option<Task<gpui::Result<()>>> {
let workspace_id = workspace.database_id()?;
- Some(cx.background_spawn(async move {
- persistence::GIT_GRAPHS
- .save_git_graph(item_id, workspace_id, true)
- .await
- }))
+ let db = persistence::GitGraphsDb::global(cx);
+ Some(
+ cx.background_spawn(
+ async move { db.save_git_graph(item_id, workspace_id, true).await },
+ ),
+ )
}
fn should_serialize(&self, event: &Self::Event) -> bool {
@@ -2430,7 +2432,7 @@ mod persistence {
)]);
}
- db::static_connection!(GIT_GRAPHS, GitGraphsDb, [WorkspaceDb]);
+ db::static_connection!(GitGraphsDb, [WorkspaceDb]);
impl GitGraphsDb {
query! {
@@ -16,10 +16,7 @@ use project::project_settings::ProjectSettings;
use settings::Settings;
use std::sync::Arc;
use time::OffsetDateTime;
-use ui::{
- Divider, HighlightedLabel, KeyBinding, ListHeader, ListItem, ListItemSpacing, Tooltip,
- prelude::*,
-};
+use ui::{Divider, HighlightedLabel, KeyBinding, ListItem, ListItemSpacing, Tooltip, prelude::*};
use ui_input::ErasedEditor;
use util::ResultExt;
use workspace::notifications::DetachAndPromptErr;
@@ -1084,21 +1081,6 @@ impl PickerDelegate for BranchListDelegate {
)
}
- fn render_header(
- &self,
- _window: &mut Window,
- _cx: &mut Context<Picker<Self>>,
- ) -> Option<AnyElement> {
- matches!(self.state, PickerState::List).then(|| {
- let label = match self.branch_filter {
- BranchFilter::All => "Branches",
- BranchFilter::Remote => "Remotes",
- };
-
- ListHeader::new(label).inset(true).into_any_element()
- })
- }
-
fn render_footer(&self, _: &mut Window, cx: &mut Context<Picker<Self>>) -> Option<AnyElement> {
if self.editor_position() == PickerEditorPosition::End {
return None;
@@ -1193,7 +1175,11 @@ impl PickerDelegate for BranchListDelegate {
this.justify_between()
.child({
let focus_handle = focus_handle.clone();
- Button::new("filter-remotes", "Filter Remotes")
+ let filter_label = match self.branch_filter {
+ BranchFilter::All => "Filter Remote",
+ BranchFilter::Remote => "Show All",
+ };
+ Button::new("filter-remotes", filter_label)
.toggle_state(matches!(
self.branch_filter,
BranchFilter::Remote
@@ -1509,6 +1495,30 @@ mod tests {
});
cx.run_until_parked();
+ let expected_branches = ["main", "feature-auth", "feature-ui", "develop"]
+ .into_iter()
+ .filter(|name| name != &branch_to_delete)
+ .collect::<HashSet<_>>();
+ let repo_branches = branch_list
+ .update(cx, |branch_list, cx| {
+ branch_list.picker.update(cx, |picker, cx| {
+ picker
+ .delegate
+ .repo
+ .as_ref()
+ .unwrap()
+ .update(cx, |repo, _cx| repo.branches())
+ })
+ })
+ .await
+ .unwrap()
+ .unwrap();
+ let repo_branches = repo_branches
+ .iter()
+ .map(|b| b.name())
+ .collect::<HashSet<_>>();
+ assert_eq!(&repo_branches, &expected_branches);
+
branch_list.update(cx, move |branch_list, cx| {
branch_list.picker.update(cx, move |picker, _cx| {
assert_eq!(picker.delegate.matches.len(), 3);
@@ -1518,13 +1528,7 @@ mod tests {
.iter()
.map(|be| be.name())
.collect::<HashSet<_>>();
- assert_eq!(
- branches,
- ["main", "feature-auth", "feature-ui", "develop"]
- .into_iter()
- .filter(|name| name != &branch_to_delete)
- .collect::<HashSet<_>>()
- );
+ assert_eq!(branches, expected_branches);
})
});
}
@@ -1577,6 +1581,35 @@ mod tests {
});
cx.run_until_parked();
+ let expected_branches = [
+ "origin/main",
+ "origin/feature-auth",
+ "fork/feature-ui",
+ "private/develop",
+ ]
+ .into_iter()
+ .filter(|name| name != &branch_to_delete)
+ .collect::<HashSet<_>>();
+ let repo_branches = branch_list
+ .update(cx, |branch_list, cx| {
+ branch_list.picker.update(cx, |picker, cx| {
+ picker
+ .delegate
+ .repo
+ .as_ref()
+ .unwrap()
+ .update(cx, |repo, _cx| repo.branches())
+ })
+ })
+ .await
+ .unwrap()
+ .unwrap();
+ let repo_branches = repo_branches
+ .iter()
+ .map(|b| b.name())
+ .collect::<HashSet<_>>();
+ assert_eq!(&repo_branches, &expected_branches);
+
// Check matches, it should match one less branch than before
branch_list.update(cx, move |branch_list, cx| {
branch_list.picker.update(cx, move |picker, _cx| {
@@ -1587,18 +1620,7 @@ mod tests {
.iter()
.map(|be| be.name())
.collect::<HashSet<_>>();
- assert_eq!(
- branches,
- [
- "origin/main",
- "origin/feature-auth",
- "fork/feature-ui",
- "private/develop"
- ]
- .into_iter()
- .filter(|name| name != &branch_to_delete)
- .collect::<HashSet<_>>()
- );
+ assert_eq!(branches, expected_branches);
})
});
}
@@ -11,7 +11,7 @@ use gpui::{
};
use language::{Anchor, Buffer, BufferId};
use project::{
- ConflictRegion, ConflictSet, ConflictSetUpdate, ProjectItem as _,
+ ConflictRegion, ConflictSet, ConflictSetUpdate, Project, ProjectItem as _,
git_store::{GitStoreEvent, RepositoryEvent},
};
use settings::Settings;
@@ -497,8 +497,7 @@ fn render_conflict_buttons(
.into_any()
}
-fn collect_conflicted_file_paths(workspace: &Workspace, cx: &App) -> Vec<String> {
- let project = workspace.project().read(cx);
+fn collect_conflicted_file_paths(project: &Project, cx: &App) -> Vec<String> {
let git_store = project.git_store().read(cx);
let mut paths = Vec::new();
@@ -534,7 +533,11 @@ pub(crate) fn register_conflict_notification(
GitStoreEvent::ConflictsUpdated
| GitStoreEvent::RepositoryUpdated(_, RepositoryEvent::StatusesChanged, _)
);
- if !AgentSettings::get_global(cx).enabled || !conflicts_changed {
+ if !AgentSettings::get_global(cx).enabled(cx) || !conflicts_changed {
+ return;
+ }
+ let project = workspace.project().read(cx);
+ if project.is_via_collab() {
return;
}
@@ -542,7 +545,7 @@ pub(crate) fn register_conflict_notification(
return;
}
- let paths = collect_conflicted_file_paths(workspace, cx);
+ let paths = collect_conflicted_file_paths(project, cx);
let notification_id = workspace::merge_conflict_notification_id();
let current_paths_set: HashSet<String> = paths.iter().cloned().collect();
@@ -556,11 +559,10 @@ pub(crate) fn register_conflict_notification(
let file_count = paths.len();
workspace.show_notification(notification_id, cx, |cx| {
cx.new(|cx| {
- let message = if file_count == 1 {
- "1 file has unresolved merge conflicts".to_string()
- } else {
- format!("{file_count} files have unresolved merge conflicts")
- };
+ let message = format!(
+ "{file_count} file{} unresolved merge conflicts",
+ if file_count == 1 { " has" } else { "s have" }
+ );
MessageNotification::new(message, cx)
.primary_message("Resolve with Agent")
@@ -14,7 +14,7 @@ use anyhow::Context as _;
use askpass::AskPassDelegate;
use cloud_llm_client::CompletionIntent;
use collections::{BTreeMap, HashMap, HashSet};
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
use editor::{
Direction, Editor, EditorElement, EditorMode, MultiBuffer, MultiBufferOffset,
actions::ExpandAllDiffHunks,
@@ -928,6 +928,7 @@ impl GitPanel {
let width = self.width;
let amend_pending = self.amend_pending;
let signoff_enabled = self.signoff_enabled;
+ let kvp = KeyValueStore::global(cx);
self.pending_serialization = cx.spawn(async move |git_panel, cx| {
cx.background_executor()
@@ -948,16 +949,15 @@ impl GitPanel {
};
cx.background_spawn(
async move {
- KEY_VALUE_STORE
- .write_kvp(
- serialization_key,
- serde_json::to_string(&SerializedGitPanel {
- width,
- amend_pending,
- signoff_enabled,
- })?,
- )
- .await?;
+ kvp.write_kvp(
+ serialization_key,
+ serde_json::to_string(&SerializedGitPanel {
+ width,
+ amend_pending,
+ signoff_enabled,
+ })?,
+ )
+ .await?;
anyhow::Ok(())
}
.log_err(),
@@ -5542,12 +5542,14 @@ impl GitPanel {
mut cx: AsyncWindowContext,
) -> anyhow::Result<Entity<Self>> {
let serialized_panel = match workspace
- .read_with(&cx, |workspace, _| Self::serialization_key(workspace))
+ .read_with(&cx, |workspace, cx| {
+ Self::serialization_key(workspace).map(|key| (key, KeyValueStore::global(cx)))
+ })
.ok()
.flatten()
{
- Some(serialization_key) => cx
- .background_spawn(async move { KEY_VALUE_STORE.read_kvp(&serialization_key) })
+ Some((serialization_key, kvp)) => cx
+ .background_spawn(async move { kvp.read_kvp(&serialization_key) })
.await
.context("loading git panel")
.log_err()
@@ -5824,6 +5826,10 @@ impl Panel for GitPanel {
Box::new(ToggleFocus)
}
+ fn starts_open(&self, _: &Window, cx: &App) -> bool {
+ GitPanelSettings::get_global(cx).starts_open
+ }
+
fn activation_priority(&self) -> u32 {
2
}
@@ -29,6 +29,7 @@ pub struct GitPanelSettings {
pub tree_view: bool,
pub diff_stats: bool,
pub show_count_badge: bool,
+ pub starts_open: bool,
}
impl ScrollbarVisibility for GitPanelSettings {
@@ -66,6 +67,7 @@ impl Settings for GitPanelSettings {
tree_view: git_panel.tree_view.unwrap(),
diff_stats: git_panel.diff_stats.unwrap(),
show_count_badge: git_panel.show_count_badge.unwrap(),
+ starts_open: git_panel.starts_open.unwrap(),
}
}
}
@@ -25,8 +25,8 @@ actions!(
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum GitPickerTab {
- Branches,
Worktrees,
+ Branches,
Stash,
}
@@ -190,9 +190,9 @@ impl GitPicker {
fn activate_next_tab(&mut self, window: &mut Window, cx: &mut Context<Self>) {
self.tab = match self.tab {
- GitPickerTab::Branches => GitPickerTab::Worktrees,
- GitPickerTab::Worktrees => GitPickerTab::Stash,
- GitPickerTab::Stash => GitPickerTab::Branches,
+ GitPickerTab::Worktrees => GitPickerTab::Branches,
+ GitPickerTab::Branches => GitPickerTab::Stash,
+ GitPickerTab::Stash => GitPickerTab::Worktrees,
};
self.ensure_active_picker(window, cx);
self.focus_active_picker(window, cx);
@@ -201,9 +201,9 @@ impl GitPicker {
fn activate_previous_tab(&mut self, window: &mut Window, cx: &mut Context<Self>) {
self.tab = match self.tab {
- GitPickerTab::Branches => GitPickerTab::Stash,
- GitPickerTab::Worktrees => GitPickerTab::Branches,
- GitPickerTab::Stash => GitPickerTab::Worktrees,
+ GitPickerTab::Worktrees => GitPickerTab::Stash,
+ GitPickerTab::Branches => GitPickerTab::Worktrees,
+ GitPickerTab::Stash => GitPickerTab::Branches,
};
self.ensure_active_picker(window, cx);
self.focus_active_picker(window, cx);
@@ -241,9 +241,9 @@ impl GitPicker {
"git-picker-tabs",
[
ToggleButtonSimple::new(
- GitPickerTab::Branches.to_string(),
+ GitPickerTab::Worktrees.to_string(),
cx.listener(|this, _, window, cx| {
- this.tab = GitPickerTab::Branches;
+ this.tab = GitPickerTab::Worktrees;
this.ensure_active_picker(window, cx);
this.focus_active_picker(window, cx);
cx.notify();
@@ -251,16 +251,16 @@ impl GitPicker {
)
.tooltip(move |_, cx| {
Tooltip::for_action_in(
- "Toggle Branch Picker",
- &ActivateBranchesTab,
- &branches_focus_handle,
+ "Toggle Worktree Picker",
+ &ActivateWorktreesTab,
+ &worktrees_focus_handle,
cx,
)
}),
ToggleButtonSimple::new(
- GitPickerTab::Worktrees.to_string(),
+ GitPickerTab::Branches.to_string(),
cx.listener(|this, _, window, cx| {
- this.tab = GitPickerTab::Worktrees;
+ this.tab = GitPickerTab::Branches;
this.ensure_active_picker(window, cx);
this.focus_active_picker(window, cx);
cx.notify();
@@ -268,9 +268,9 @@ impl GitPicker {
)
.tooltip(move |_, cx| {
Tooltip::for_action_in(
- "Toggle Worktree Picker",
- &ActivateWorktreesTab,
- &worktrees_focus_handle,
+ "Toggle Branch Picker",
+ &ActivateBranchesTab,
+ &branches_focus_handle,
cx,
)
}),
@@ -297,8 +297,8 @@ impl GitPicker {
.style(ToggleButtonGroupStyle::Outlined)
.auto_width()
.selected_index(match self.tab {
- GitPickerTab::Branches => 0,
- GitPickerTab::Worktrees => 1,
+ GitPickerTab::Worktrees => 0,
+ GitPickerTab::Branches => 1,
GitPickerTab::Stash => 2,
}),
)
@@ -295,11 +295,12 @@ pub fn resolve_active_repository(workspace: &Workspace, cx: &App) -> Option<Enti
git_store
.repositories()
.values()
- .find(|repo| {
+ .filter(|repo| {
let repo_path = &repo.read(cx).work_directory_abs_path;
*repo_path == worktree_abs_path
|| worktree_abs_path.starts_with(repo_path.as_ref())
})
+ .max_by_key(|repo| repo.read(cx).work_directory_abs_path.as_os_str().len())
.cloned()
})
})
@@ -1219,8 +1219,9 @@ impl SerializableItem for ProjectDiff {
window: &mut Window,
cx: &mut App,
) -> Task<Result<Entity<Self>>> {
+ let db = persistence::ProjectDiffDb::global(cx);
window.spawn(cx, async move |cx| {
- let diff_base = persistence::PROJECT_DIFF_DB.get_diff_base(item_id, workspace_id)?;
+ let diff_base = db.get_diff_base(item_id, workspace_id)?;
let diff = cx.update(|window, cx| {
let branch_diff = cx
@@ -1246,10 +1247,10 @@ impl SerializableItem for ProjectDiff {
let workspace_id = workspace.database_id()?;
let diff_base = self.diff_base(cx).clone();
+ let db = persistence::ProjectDiffDb::global(cx);
Some(cx.background_spawn({
async move {
- persistence::PROJECT_DIFF_DB
- .save_diff_base(item_id, workspace_id, diff_base.clone())
+ db.save_diff_base(item_id, workspace_id, diff_base.clone())
.await
}
}))
@@ -1289,7 +1290,7 @@ mod persistence {
)];
}
- db::static_connection!(PROJECT_DIFF_DB, ProjectDiffDb, [WorkspaceDb]);
+ db::static_connection!(ProjectDiffDb, [WorkspaceDb]);
impl ProjectDiffDb {
pub async fn save_diff_base(
@@ -2,7 +2,10 @@
use anyhow::Result;
use buffer_diff::BufferDiff;
-use editor::{Editor, EditorEvent, MultiBuffer, ToPoint, actions::DiffClipboardWithSelectionData};
+use editor::{
+ Editor, EditorEvent, EditorSettings, MultiBuffer, SplittableEditor, ToPoint,
+ actions::DiffClipboardWithSelectionData,
+};
use futures::{FutureExt, select_biased};
use gpui::{
AnyElement, App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, FocusHandle,
@@ -10,6 +13,7 @@ use gpui::{
};
use language::{self, Buffer, Point};
use project::Project;
+use settings::Settings;
use std::{
any::{Any, TypeId},
cmp,
@@ -22,13 +26,13 @@ use ui::{Color, Icon, IconName, Label, LabelCommon as _, SharedString};
use util::paths::PathExt;
use workspace::{
- Item, ItemHandle as _, ItemNavHistory, Workspace,
+ Item, ItemNavHistory, Workspace,
item::{ItemEvent, SaveOptions, TabContentParams},
searchable::SearchableItemHandle,
};
pub struct TextDiffView {
- diff_editor: Entity<Editor>,
+ diff_editor: Entity<SplittableEditor>,
title: SharedString,
path: Option<SharedString>,
buffer_changes_tx: watch::Sender<()>,
@@ -47,11 +51,24 @@ impl TextDiffView {
let source_editor = diff_data.editor.clone();
let selection_data = source_editor.update(cx, |editor, cx| {
- let multibuffer = editor.buffer().read(cx);
- let source_buffer = multibuffer.as_singleton()?;
+ let multibuffer = editor.buffer();
let selections = editor.selections.all::<Point>(&editor.display_snapshot(cx));
- let buffer_snapshot = source_buffer.read(cx);
let first_selection = selections.first()?;
+
+ let (source_buffer, buffer_start, start_excerpt) = multibuffer
+ .read(cx)
+ .point_to_buffer_point(first_selection.start, cx)?;
+ let buffer_end = multibuffer
+ .read(cx)
+ .point_to_buffer_point(first_selection.end, cx)
+ .and_then(|(buf, pt, end_excerpt)| {
+ (buf.read(cx).remote_id() == source_buffer.read(cx).remote_id()
+ && end_excerpt == start_excerpt)
+ .then_some(pt)
+ })
+ .unwrap_or(buffer_start);
+
+ let buffer_snapshot = source_buffer.read(cx);
let max_point = buffer_snapshot.max_point();
if first_selection.is_empty() {
@@ -59,15 +76,12 @@ impl TextDiffView {
return Some((source_buffer, full_range));
}
- let start = first_selection.start;
- let end = first_selection.end;
- let expanded_start = Point::new(start.row, 0);
-
- let expanded_end = if end.column > 0 {
- let next_row = end.row + 1;
+ let expanded_start = Point::new(buffer_start.row, 0);
+ let expanded_end = if buffer_end.column > 0 {
+ let next_row = buffer_end.row + 1;
cmp::min(max_point, Point::new(next_row, 0))
} else {
- end
+ buffer_end
};
Some((source_buffer, expanded_start..expanded_end))
});
@@ -78,11 +92,24 @@ impl TextDiffView {
};
source_editor.update(cx, |source_editor, cx| {
- source_editor.change_selections(Default::default(), window, cx, |s| {
- s.select_ranges(vec![
- expanded_selection_range.start..expanded_selection_range.end,
- ]);
- })
+ let multibuffer = source_editor.buffer();
+ let mb_range = {
+ let mb = multibuffer.read(cx);
+ let start_anchor =
+ mb.buffer_point_to_anchor(&source_buffer, expanded_selection_range.start, cx);
+ let end_anchor =
+ mb.buffer_point_to_anchor(&source_buffer, expanded_selection_range.end, cx);
+ start_anchor.zip(end_anchor).map(|(s, e)| {
+ let snapshot = mb.snapshot(cx);
+ s.to_point(&snapshot)..e.to_point(&snapshot)
+ })
+ };
+
+ if let Some(range) = mb_range {
+ source_editor.change_selections(Default::default(), window, cx, |s| {
+ s.select_ranges(vec![range]);
+ });
+ }
});
let source_buffer_snapshot = source_buffer.read(cx).snapshot();
@@ -102,11 +129,11 @@ impl TextDiffView {
);
let task = window.spawn(cx, async move |cx| {
- let project = workspace.update(cx, |workspace, _| workspace.project().clone())?;
-
update_diff_buffer(&diff_buffer, &source_buffer, &clipboard_buffer, cx).await?;
workspace.update_in(cx, |workspace, window, cx| {
+ let project = workspace.project().clone();
+ let workspace_entity = cx.entity();
let diff_view = cx.new(|cx| {
TextDiffView::new(
clipboard_buffer,
@@ -115,6 +142,7 @@ impl TextDiffView {
expanded_selection_range,
diff_buffer,
project,
+ workspace_entity,
window,
cx,
)
@@ -139,6 +167,7 @@ impl TextDiffView {
source_range: Range<Point>,
diff_buffer: Entity<BufferDiff>,
project: Entity<Project>,
+ workspace: Entity<Workspace>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
@@ -151,15 +180,24 @@ impl TextDiffView {
multibuffer
});
let diff_editor = cx.new(|cx| {
- let mut editor = Editor::for_multibuffer(multibuffer, Some(project), window, cx);
- editor.start_temporary_diff_override();
- editor.disable_diagnostics(cx);
- editor.set_expand_all_diff_hunks(cx);
- editor.set_render_diff_hunk_controls(
+ let splittable = SplittableEditor::new(
+ EditorSettings::get_global(cx).diff_view_style,
+ multibuffer,
+ project,
+ workspace,
+ window,
+ cx,
+ );
+ splittable.set_render_diff_hunk_controls(
Arc::new(|_, _, _, _, _, _, _, _| gpui::Empty.into_any_element()),
cx,
);
- editor
+ splittable.rhs_editor().update(cx, |editor, cx| {
+ editor.start_temporary_diff_override();
+ editor.disable_diagnostics(cx);
+ editor.set_expand_all_diff_hunks(cx);
+ });
+ splittable
});
let (buffer_changes_tx, mut buffer_changes_rx) = watch::channel(());
@@ -329,12 +367,14 @@ impl Item for TextDiffView {
&'a self,
type_id: TypeId,
self_handle: &'a Entity<Self>,
- _: &'a App,
+ cx: &'a App,
) -> Option<gpui::AnyEntity> {
if type_id == TypeId::of::<Self>() {
Some(self_handle.clone().into())
- } else if type_id == TypeId::of::<Editor>() {
+ } else if type_id == TypeId::of::<SplittableEditor>() {
Some(self.diff_editor.clone().into())
+ } else if type_id == TypeId::of::<Editor>() {
+ Some(self.diff_editor.read(cx).rhs_editor().clone().into())
} else {
None
}
@@ -349,7 +389,7 @@ impl Item for TextDiffView {
cx: &App,
f: &mut dyn FnMut(gpui::EntityId, &dyn project::ProjectItem),
) {
- self.diff_editor.for_each_project_item(cx, f)
+ self.diff_editor.read(cx).for_each_project_item(cx, f)
}
fn set_nav_history(
@@ -358,7 +398,8 @@ impl Item for TextDiffView {
_: &mut Window,
cx: &mut Context<Self>,
) {
- self.diff_editor.update(cx, |editor, _| {
+ let rhs = self.diff_editor.read(cx).rhs_editor().clone();
+ rhs.update(cx, |editor, _| {
editor.set_nav_history(Some(nav_history));
});
}
@@ -439,11 +480,12 @@ impl Render for TextDiffView {
#[cfg(test)]
mod tests {
use super::*;
- use editor::{MultiBufferOffset, test::editor_test_context::assert_state_with_diff};
- use gpui::{TestAppContext, VisualContext};
+ use editor::{MultiBufferOffset, PathKey, test::editor_test_context::assert_state_with_diff};
+ use gpui::{BorrowAppContext, TestAppContext, VisualContext};
+ use language::Point;
use project::{FakeFs, Project};
use serde_json::json;
- use settings::SettingsStore;
+ use settings::{DiffViewStyle, SettingsStore};
use unindent::unindent;
use util::{path, test::marked_text_ranges};
use workspace::MultiWorkspace;
@@ -452,6 +494,11 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
+ cx.update_global::<SettingsStore, _>(|store, cx| {
+ store.update_user_settings(cx, |settings| {
+ settings.editor.diff_view_style = Some(DiffViewStyle::Unified);
+ });
+ });
theme::init(theme::LoadThemes::JustBase, cx);
});
}
@@ -643,6 +690,185 @@ mod tests {
.await;
}
+ #[gpui::test]
+ async fn test_diffing_clipboard_from_multibuffer_with_selection(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ path!("/project"),
+ json!({
+ "a.txt": "alpha\nbeta\ngamma",
+ "b.txt": "one\ntwo\nthree"
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs, [path!("/project").as_ref()], cx).await;
+
+ let buffer_a = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/project/a.txt"), cx)
+ })
+ .await
+ .unwrap();
+ let buffer_b = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/project/b.txt"), cx)
+ })
+ .await
+ .unwrap();
+
+ let (multi_workspace, cx) =
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+ let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+
+ let editor = cx.new_window_entity(|window, cx| {
+ let multibuffer = cx.new(|cx| {
+ let mut mb = MultiBuffer::new(language::Capability::ReadWrite);
+ mb.set_excerpts_for_path(
+ PathKey::sorted(0),
+ buffer_a.clone(),
+ [Point::new(0, 0)..Point::new(2, 5)],
+ 0,
+ cx,
+ );
+ mb.set_excerpts_for_path(
+ PathKey::sorted(1),
+ buffer_b.clone(),
+ [Point::new(0, 0)..Point::new(2, 5)],
+ 0,
+ cx,
+ );
+ mb
+ });
+
+ let mut editor =
+ Editor::for_multibuffer(multibuffer, Some(project.clone()), window, cx);
+ // Select "beta" inside the first excerpt
+ editor.change_selections(Default::default(), window, cx, |s| {
+ s.select_ranges([MultiBufferOffset(6)..MultiBufferOffset(10)]);
+ });
+ editor
+ });
+
+ let diff_view = workspace
+ .update_in(cx, |workspace, window, cx| {
+ TextDiffView::open(
+ &DiffClipboardWithSelectionData {
+ clipboard_text: "REPLACED".to_string(),
+ editor,
+ },
+ workspace,
+ window,
+ cx,
+ )
+ })
+ .unwrap()
+ .await
+ .unwrap();
+
+ cx.executor().run_until_parked();
+
+ diff_view.read_with(cx, |diff_view, _cx| {
+ assert!(
+ diff_view.title.contains("Clipboard"),
+ "diff view should have opened with a clipboard diff title, got: {}",
+ diff_view.title
+ );
+ });
+ }
+
+ #[gpui::test]
+ async fn test_diffing_clipboard_from_multibuffer_with_empty_selection(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ path!("/project"),
+ json!({
+ "a.txt": "alpha\nbeta\ngamma",
+ "b.txt": "one\ntwo\nthree"
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs, [path!("/project").as_ref()], cx).await;
+
+ let buffer_a = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/project/a.txt"), cx)
+ })
+ .await
+ .unwrap();
+ let buffer_b = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/project/b.txt"), cx)
+ })
+ .await
+ .unwrap();
+
+ let (multi_workspace, cx) =
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+ let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+
+ let editor = cx.new_window_entity(|window, cx| {
+ let multibuffer = cx.new(|cx| {
+ let mut mb = MultiBuffer::new(language::Capability::ReadWrite);
+ mb.set_excerpts_for_path(
+ PathKey::sorted(0),
+ buffer_a.clone(),
+ [Point::new(0, 0)..Point::new(2, 5)],
+ 0,
+ cx,
+ );
+ mb.set_excerpts_for_path(
+ PathKey::sorted(1),
+ buffer_b.clone(),
+ [Point::new(0, 0)..Point::new(2, 5)],
+ 0,
+ cx,
+ );
+ mb
+ });
+
+ let mut editor =
+ Editor::for_multibuffer(multibuffer, Some(project.clone()), window, cx);
+ // Cursor inside the first excerpt (no selection)
+ editor.change_selections(Default::default(), window, cx, |s| {
+ s.select_ranges([MultiBufferOffset(6)..MultiBufferOffset(6)]);
+ });
+ editor
+ });
+
+ let diff_view = workspace
+ .update_in(cx, |workspace, window, cx| {
+ TextDiffView::open(
+ &DiffClipboardWithSelectionData {
+ clipboard_text: "REPLACED".to_string(),
+ editor,
+ },
+ workspace,
+ window,
+ cx,
+ )
+ })
+ .unwrap()
+ .await
+ .unwrap();
+
+ cx.executor().run_until_parked();
+
+ // Empty selection should diff the full underlying buffer
+ diff_view.read_with(cx, |diff_view, _cx| {
+ assert!(
+ diff_view.title.contains("Clipboard"),
+ "diff view should have opened with a clipboard diff title, got: {}",
+ diff_view.title
+ );
+ });
+ }
+
async fn base_test(
project_root: &str,
file_path: &str,
@@ -715,7 +941,9 @@ mod tests {
cx.executor().run_until_parked();
assert_state_with_diff(
- &diff_view.read_with(cx, |diff_view, _| diff_view.diff_editor.clone()),
+ &diff_view.read_with(cx, |diff_view, cx| {
+ diff_view.diff_editor.read(cx).rhs_editor().clone()
+ }),
cx,
expected_diff,
);
@@ -2,7 +2,7 @@ use anyhow::Context as _;
use collections::HashSet;
use fuzzy::StringMatchCandidate;
-use git::repository::{Worktree as GitWorktree, validate_worktree_directory};
+use git::repository::Worktree as GitWorktree;
use gpui::{
Action, App, AsyncWindowContext, Context, DismissEvent, Entity, EventEmitter, FocusHandle,
Focusable, InteractiveElement, IntoElement, Modifiers, ModifiersChangedEvent, ParentElement,
@@ -96,9 +96,12 @@ impl WorktreeList {
});
cx.spawn_in(window, async move |this, cx| {
- let all_worktrees = all_worktrees_request
+ let all_worktrees: Vec<_> = all_worktrees_request
.context("No active repository")?
- .await??;
+ .await??
+ .into_iter()
+ .filter(|worktree| worktree.ref_name.is_some()) // hide worktrees without a branch
+ .collect();
let default_branch = default_branch_request
.context("No active repository")?
@@ -182,7 +185,7 @@ impl WorktreeList {
return;
}
picker.delegate.create_worktree(
- entry.worktree.branch(),
+ entry.worktree.display_name(),
replace_current_window,
Some(default_branch.into()),
window,
@@ -300,11 +303,10 @@ impl WorktreeListDelegate {
.git
.worktree_directory
.clone();
- let original_repo = repo.original_repo_abs_path.clone();
- let directory =
- validate_worktree_directory(&original_repo, &worktree_directory_setting)?;
- let new_worktree_path = directory.join(&branch);
- let receiver = repo.create_worktree(branch.clone(), directory, commit);
+ let new_worktree_path =
+ repo.path_for_new_linked_worktree(&branch, &worktree_directory_setting)?;
+ let receiver =
+ repo.create_worktree(branch.clone(), new_worktree_path.clone(), commit);
anyhow::Ok((receiver, new_worktree_path))
})?;
receiver.await??;
@@ -650,7 +652,7 @@ impl PickerDelegate for WorktreeListDelegate {
let candidates = all_worktrees
.iter()
.enumerate()
- .map(|(ix, worktree)| StringMatchCandidate::new(ix, worktree.branch()))
+ .map(|(ix, worktree)| StringMatchCandidate::new(ix, worktree.display_name()))
.collect::<Vec<StringMatchCandidate>>();
fuzzy::match_strings(
&candidates,
@@ -675,13 +677,13 @@ impl PickerDelegate for WorktreeListDelegate {
if !query.is_empty()
&& !matches
.first()
- .is_some_and(|entry| entry.worktree.branch() == query)
+ .is_some_and(|entry| entry.worktree.display_name() == query)
{
let query = query.replace(' ', "-");
matches.push(WorktreeEntry {
worktree: GitWorktree {
path: Default::default(),
- ref_name: format!("refs/heads/{query}").into(),
+ ref_name: Some(format!("refs/heads/{query}").into()),
sha: Default::default(),
},
positions: Vec::new(),
@@ -707,7 +709,7 @@ impl PickerDelegate for WorktreeListDelegate {
return;
};
if entry.is_new {
- self.create_worktree(&entry.worktree.branch(), secondary, None, window, cx);
+ self.create_worktree(&entry.worktree.display_name(), secondary, None, window, cx);
} else {
self.open_worktree(&entry.worktree.path, secondary, window, cx);
}
@@ -738,16 +740,19 @@ impl PickerDelegate for WorktreeListDelegate {
let (branch_name, sublabel) = if entry.is_new {
(
- Label::new(format!("Create Worktree: \"{}\"…", entry.worktree.branch()))
- .truncate()
- .into_any_element(),
+ Label::new(format!(
+ "Create Worktree: \"{}\"…",
+ entry.worktree.display_name()
+ ))
+ .truncate()
+ .into_any_element(),
format!(
"based off {}",
self.base_branch(cx).unwrap_or("the current branch")
),
)
} else {
- let branch = entry.worktree.branch();
+ let branch = entry.worktree.display_name();
let branch_first_line = branch.lines().next().unwrap_or(branch);
let positions: Vec<_> = entry
.positions
@@ -144,7 +144,7 @@ windows = { version = "0.61", features = ["Win32_Foundation"] }
backtrace.workspace = true
collections = { workspace = true, features = ["test-support"] }
env_logger.workspace = true
-gpui_platform.workspace = true
+gpui_platform = { workspace = true, features = ["font-kit"] }
lyon = { version = "1.0", features = ["extra"] }
rand.workspace = true
scheduler = { workspace = true, features = ["test-support"] }
@@ -181,6 +181,7 @@ fn run_example() {
cx.set_menus(vec![Menu {
name: "Image".into(),
items: vec![MenuItem::action("Quit", Quit)],
+ disabled: false,
}]);
let window_options = WindowOptions {
@@ -273,10 +273,7 @@ fn run_example() {
cx.activate(true);
cx.on_action(|_: &Quit, cx| cx.quit());
cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]);
- cx.set_menus(vec![Menu {
- name: "Image Gallery".into(),
- items: vec![MenuItem::action("Quit", Quit)],
- }]);
+ cx.set_menus([Menu::new("Image Gallery").items([MenuItem::action("Quit", Quit)])]);
let window_options = WindowOptions {
titlebar: Some(TitlebarOptions {
@@ -56,21 +56,23 @@ impl HelloWorld {
}))
.when(self.secondary_open, |this| {
this.child(
- // GPUI can't support deferred here yet,
- // it was inside another deferred element.
- anchored()
- .anchor(Corner::TopLeft)
- .snap_to_window_with_margin(px(8.))
- .child(
- popover()
- .child("This is second level Popover")
- .bg(gpui::white())
- .border_color(gpui::blue())
- .on_mouse_down_out(cx.listener(|this, _, _, cx| {
- this.secondary_open = false;
- cx.notify();
- })),
- ),
+ // Now GPUI supports nested deferred!
+ deferred(
+ anchored()
+ .anchor(Corner::TopLeft)
+ .snap_to_window_with_margin(px(8.))
+ .child(
+ popover()
+ .child("This is second level Popover with nested deferred!")
+ .bg(gpui::white())
+ .border_color(gpui::blue())
+ .on_mouse_down_out(cx.listener(|this, _, _, cx| {
+ this.secondary_open = false;
+ cx.notify();
+ })),
+ ),
+ )
+ .priority(2),
)
})
}
@@ -2,7 +2,7 @@
use gpui::{
App, Context, Global, Menu, MenuItem, SharedString, SystemMenuType, Window, WindowOptions,
- actions, div, prelude::*, rgb,
+ actions, div, prelude::*,
};
use gpui_platform::application;
@@ -12,12 +12,12 @@ impl Render for SetMenus {
fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
div()
.flex()
- .bg(rgb(0x2e7d32))
+ .bg(gpui::white())
.size_full()
.justify_center()
.items_center()
.text_xl()
- .text_color(rgb(0xffffff))
+ .text_color(gpui::black())
.child("Set Menus Example")
}
}
@@ -28,7 +28,8 @@ fn run_example() {
// Bring the menu bar to the foreground (so you can see the menu bar)
cx.activate(true);
- // Register the `quit` function so it can be referenced by the `MenuItem::action` in the menu bar
+ // Register the `quit` function so it can be referenced
+ // by the `MenuItem::action` in the menu bar
cx.on_action(quit);
cx.on_action(toggle_check);
// Add menu items
@@ -91,19 +92,24 @@ impl Global for AppState {}
fn set_app_menus(cx: &mut App) {
let app_state = cx.global::<AppState>();
- cx.set_menus(vec![Menu {
- name: "set_menus".into(),
- items: vec![
- MenuItem::os_submenu("Services", SystemMenuType::Services),
- MenuItem::separator(),
- MenuItem::action(ViewMode::List, ToggleCheck)
- .checked(app_state.view_mode == ViewMode::List),
- MenuItem::action(ViewMode::Grid, ToggleCheck)
- .checked(app_state.view_mode == ViewMode::Grid),
- MenuItem::separator(),
- MenuItem::action("Quit", Quit),
- ],
- }]);
+ cx.set_menus([Menu::new("set_menus").items([
+ MenuItem::os_submenu("Services", SystemMenuType::Services),
+ MenuItem::separator(),
+ MenuItem::action("Disabled Item", gpui::NoAction).disabled(true),
+ MenuItem::submenu(Menu::new("Disabled Submenu").disabled(true)),
+ MenuItem::separator(),
+ MenuItem::action("List Mode", ToggleCheck).checked(app_state.view_mode == ViewMode::List),
+ MenuItem::submenu(
+ Menu::new("Mode").items([
+ MenuItem::action(ViewMode::List, ToggleCheck)
+ .checked(app_state.view_mode == ViewMode::List),
+ MenuItem::action(ViewMode::Grid, ToggleCheck)
+ .checked(app_state.view_mode == ViewMode::Grid),
+ ]),
+ ),
+ MenuItem::separator(),
+ MenuItem::action("Quit", Quit),
+ ])]);
}
// Associate actions using the `actions!` macro (or `Action` derive macro)
@@ -111,7 +117,7 @@ actions!(set_menus, [Quit, ToggleCheck]);
// Define the quit function that is registered with the App
fn quit(_: &Quit, cx: &mut App) {
- println!("Gracefully quitting the application . . .");
+ println!("Gracefully quitting the application...");
cx.quit();
}
@@ -1,6 +1,7 @@
#![cfg_attr(target_family = "wasm", no_main)]
use std::{
+ borrow::Cow,
ops::{Deref, DerefMut},
sync::Arc,
};
@@ -204,7 +205,7 @@ impl RenderOnce for CharacterGrid {
"❮", "<=", "!=", "==", "--", "++", "=>", "->", "🏀", "🎊", "😍", "❤️", "👍", "👎",
];
- let columns = 11;
+ let columns = 20;
let rows = characters.len().div_ceil(columns);
let grid_rows = (0..rows).map(|row_idx| {
@@ -238,6 +239,7 @@ impl RenderOnce for CharacterGrid {
struct TextExample {
next_id: usize,
+ font_family: SharedString,
}
impl TextExample {
@@ -245,8 +247,33 @@ impl TextExample {
self.next_id += 1;
self.next_id
}
+
+ fn button(
+ text: &str,
+ cx: &mut Context<Self>,
+ on_click: impl Fn(&mut Self, &mut Context<Self>) + 'static,
+ ) -> impl IntoElement {
+ div()
+ .id(text.to_string())
+ .flex_none()
+ .child(text.to_string())
+ .bg(gpui::black())
+ .text_color(gpui::white())
+ .active(|this| this.opacity(0.8))
+ .px_3()
+ .py_1()
+ .on_click(cx.listener(move |this, _, _, cx| on_click(this, cx)))
+ }
}
+const FONT_FAMILIES: [&str; 5] = [
+ ".ZedMono",
+ ".SystemUIFont",
+ "Menlo",
+ "Monaco",
+ "Courier New",
+];
+
impl Render for TextExample {
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let tcx = cx.text_context();
@@ -265,7 +292,26 @@ impl Render for TextExample {
let step_up_6 = step_up_5 * type_scale;
div()
+ .font_family(self.font_family.clone())
.size_full()
+ .child(
+ div()
+ .bg(gpui::white())
+ .border_b_1()
+ .border_color(gpui::black())
+ .p_3()
+ .flex()
+ .child(Self::button(&self.font_family, cx, |this, cx| {
+ let new_family = FONT_FAMILIES
+ .iter()
+ .position(|f| *f == this.font_family.as_str())
+ .map(|idx| FONT_FAMILIES[(idx + 1) % FONT_FAMILIES.len()])
+ .unwrap_or(FONT_FAMILIES[0]);
+
+ this.font_family = SharedString::new(new_family);
+ cx.notify();
+ })),
+ )
.child(
div()
.id("text-example")
@@ -304,9 +350,19 @@ fn run_example() {
application().run(|cx: &mut App| {
cx.set_menus(vec![Menu {
name: "GPUI Typography".into(),
+ disabled: false,
items: vec![],
}]);
+ let fonts = [include_bytes!(
+ "../../../assets/fonts/lilex/Lilex-Regular.ttf"
+ )]
+ .iter()
+ .map(|b| Cow::Borrowed(&b[..]))
+ .collect();
+
+ _ = cx.text_system().add_fonts(fonts);
+
cx.init_colors();
cx.set_global(GlobalTextContext(Arc::new(TextContext::default())));
@@ -323,7 +379,12 @@ fn run_example() {
))),
..Default::default()
},
- |_window, cx| cx.new(|_cx| TextExample { next_id: 0 }),
+ |_window, cx| {
+ cx.new(|_cx| TextExample {
+ next_id: 0,
+ font_family: ".ZedMono".into(),
+ })
+ },
)
.unwrap();
@@ -1,7 +1,7 @@
use anyhow::{Context as _, Result};
use collections::HashMap;
pub use gpui_macros::Action;
-pub use no_action::{NoAction, is_no_action};
+pub use no_action::{NoAction, Unbind, is_no_action, is_unbind};
use serde_json::json;
use std::{
any::{Any, TypeId},
@@ -290,19 +290,6 @@ impl ActionRegistry {
}
}
- #[cfg(test)]
- pub(crate) fn load_action<A: Action>(&mut self) {
- self.insert_action(MacroActionData {
- name: A::name_for_type(),
- type_id: TypeId::of::<A>(),
- build: A::build,
- json_schema: A::action_json_schema,
- deprecated_aliases: A::deprecated_aliases(),
- deprecation_message: A::deprecation_message(),
- documentation: A::documentation(),
- });
- }
-
fn insert_action(&mut self, action: MacroActionData) {
let name = action.name;
if self.by_name.contains_key(name) {
@@ -432,7 +419,8 @@ pub fn generate_list_of_all_registered_actions() -> impl Iterator<Item = MacroAc
mod no_action {
use crate as gpui;
- use std::any::Any as _;
+ use schemars::JsonSchema;
+ use serde::Deserialize;
actions!(
zed,
@@ -443,8 +431,23 @@ mod no_action {
]
);
+ /// Action with special handling which unbinds earlier bindings for the same keystrokes when
+ /// they dispatch the named action, regardless of that action's context.
+ ///
+ /// In keymap JSON this is written as:
+ ///
+ /// `["zed::Unbind", "editor::NewLine"]`
+ #[derive(Clone, Debug, PartialEq, Deserialize, JsonSchema, gpui::Action)]
+ #[action(namespace = zed)]
+ pub struct Unbind(pub gpui::SharedString);
+
/// Returns whether or not this action represents a removed key binding.
pub fn is_no_action(action: &dyn gpui::Action) -> bool {
- action.as_any().type_id() == (NoAction {}).type_id()
+ action.as_any().is::<NoAction>()
+ }
+
+ /// Returns whether or not this action represents an unbind marker.
+ pub fn is_unbind(action: &dyn gpui::Action) -> bool {
+ action.as_any().is::<Unbind>()
}
}
@@ -579,21 +579,13 @@ impl GpuiMode {
pub struct App {
pub(crate) this: Weak<AppCell>,
pub(crate) platform: Rc<dyn Platform>,
- pub(crate) mode: GpuiMode,
text_system: Arc<TextSystem>,
- flushing_effects: bool,
- pending_updates: usize,
+
pub(crate) actions: Rc<ActionRegistry>,
pub(crate) active_drag: Option<AnyDrag>,
pub(crate) background_executor: BackgroundExecutor,
pub(crate) foreground_executor: ForegroundExecutor,
- pub(crate) loading_assets: FxHashMap<(TypeId, u64), Box<dyn Any>>,
- asset_source: Arc<dyn AssetSource>,
- pub(crate) svg_renderer: SvgRenderer,
- http_client: Arc<dyn HttpClient>,
- pub(crate) globals_by_type: FxHashMap<TypeId, Box<dyn Any>>,
pub(crate) entities: EntityMap,
- pub(crate) window_update_stack: Vec<WindowId>,
pub(crate) new_entity_observers: SubscriberSet<TypeId, NewEntityListener>,
pub(crate) windows: SlotMap<WindowId, Option<Box<Window>>>,
pub(crate) window_handles: FxHashMap<WindowId, AnyWindowHandle>,
@@ -604,10 +596,8 @@ pub struct App {
pub(crate) global_action_listeners:
FxHashMap<TypeId, Vec<Rc<dyn Fn(&dyn Any, DispatchPhase, &mut Self)>>>,
pending_effects: VecDeque<Effect>,
- pub(crate) pending_notifications: FxHashSet<EntityId>,
- pub(crate) pending_global_notifications: FxHashSet<TypeId>,
+
pub(crate) observers: SubscriberSet<EntityId, Handler>,
- // TypeId is the type of the event that the listener callback expects
pub(crate) event_listeners: SubscriberSet<EntityId, (TypeId, Listener)>,
pub(crate) keystroke_observers: SubscriberSet<(), KeystrokeObserver>,
pub(crate) keystroke_interceptors: SubscriberSet<(), KeystrokeObserver>,
@@ -617,8 +607,30 @@ pub struct App {
pub(crate) global_observers: SubscriberSet<TypeId, Handler>,
pub(crate) quit_observers: SubscriberSet<(), QuitHandler>,
pub(crate) restart_observers: SubscriberSet<(), Handler>,
- pub(crate) restart_path: Option<PathBuf>,
pub(crate) window_closed_observers: SubscriberSet<(), WindowClosedHandler>,
+
+ /// Per-App element arena. This isolates element allocations between different
+ /// App instances (important for tests where multiple Apps run concurrently).
+ pub(crate) element_arena: RefCell<Arena>,
+ /// Per-App event arena.
+ pub(crate) event_arena: Arena,
+
+ // Drop globals last. We need to ensure all tasks owned by entities and
+ // callbacks are marked cancelled at this point, because dropping globals
+ // also shuts down the tokio runtime, and any task that then attempts to
+ // spawn a blocking tokio task might panic.
+ pub(crate) globals_by_type: FxHashMap<TypeId, Box<dyn Any>>,
+
+ // assets
+ pub(crate) loading_assets: FxHashMap<(TypeId, u64), Box<dyn Any>>,
+ asset_source: Arc<dyn AssetSource>,
+ pub(crate) svg_renderer: SvgRenderer,
+ http_client: Arc<dyn HttpClient>,
+
+ // below is plain data, the drop order is insignificant here
+ pub(crate) pending_notifications: FxHashSet<EntityId>,
+ pub(crate) pending_global_notifications: FxHashSet<TypeId>,
+ pub(crate) restart_path: Option<PathBuf>,
pub(crate) layout_id_buffer: Vec<LayoutId>, // We recycle this memory across layout requests.
pub(crate) propagate_event: bool,
pub(crate) prompt_builder: Option<PromptBuilder>,
@@ -632,13 +644,18 @@ pub struct App {
#[cfg(any(test, feature = "test-support", debug_assertions))]
pub(crate) name: Option<&'static str>,
pub(crate) text_rendering_mode: Rc<Cell<TextRenderingMode>>,
+
+ pub(crate) window_update_stack: Vec<WindowId>,
+ pub(crate) mode: GpuiMode,
+ flushing_effects: bool,
+ pending_updates: usize,
quit_mode: QuitMode,
quitting: bool,
- /// Per-App element arena. This isolates element allocations between different
- /// App instances (important for tests where multiple Apps run concurrently).
- pub(crate) element_arena: RefCell<Arena>,
- /// Per-App event arena.
- pub(crate) event_arena: Arena,
+
+ // We need to ensure the leak detector drops last, after all tasks, callbacks,
+ // and other entity-holding state have been dropped. Otherwise it may report false positives.
+ #[cfg(any(test, feature = "leak-detection"))]
+ _ref_counts: Arc<RwLock<EntityRefCounts>>,
}
impl App {
@@ -660,6 +677,9 @@ impl App {
let keyboard_layout = platform.keyboard_layout();
let keyboard_mapper = platform.keyboard_mapper();
+ #[cfg(any(test, feature = "leak-detection"))]
+ let _ref_counts = entities.ref_counts_drop_handle();
+
let app = Rc::new_cyclic(|this| AppCell {
app: RefCell::new(App {
this: this.clone(),
@@ -719,6 +739,9 @@ impl App {
name: None,
element_arena: RefCell::new(Arena::new(1024 * 1024)),
event_arena: Arena::new(1024 * 1024),
+
+ #[cfg(any(test, feature = "leak-detection"))]
+ _ref_counts,
}),
});
@@ -2049,7 +2072,8 @@ impl App {
}
/// Sets the menu bar for this application. This will replace any existing menu bar.
- pub fn set_menus(&self, menus: Vec<Menu>) {
+ pub fn set_menus(&self, menus: impl IntoIterator<Item = Menu>) {
+ let menus: Vec<Menu> = menus.into_iter().collect();
self.platform.set_menus(menus, &self.keymap.borrow());
}
@@ -59,7 +59,8 @@ pub(crate) struct EntityMap {
ref_counts: Arc<RwLock<EntityRefCounts>>,
}
-struct EntityRefCounts {
+#[doc(hidden)]
+pub(crate) struct EntityRefCounts {
counts: SlotMap<EntityId, AtomicUsize>,
dropped_entity_ids: Vec<EntityId>,
#[cfg(any(test, feature = "leak-detection"))]
@@ -84,7 +85,7 @@ impl EntityMap {
}
#[doc(hidden)]
- pub fn ref_counts_drop_handle(&self) -> impl Sized + use<> {
+ pub fn ref_counts_drop_handle(&self) -> Arc<RwLock<EntityRefCounts>> {
self.ref_counts.clone()
}
@@ -2589,7 +2589,8 @@ impl Interactivity {
let pending_mouse_down = pending_mouse_down.clone();
let source_bounds = hitbox.bounds;
move |window: &Window| {
- pending_mouse_down.borrow().is_none()
+ !window.last_input_was_keyboard()
+ && pending_mouse_down.borrow().is_none()
&& source_bounds.contains(&window.mouse_position())
}
});
@@ -629,66 +629,99 @@ mod tests {
use std::{cell::RefCell, ops::Range, rc::Rc};
use crate::{
- Action, ActionRegistry, App, Bounds, Context, DispatchTree, FocusHandle, InputHandler,
- IntoElement, KeyBinding, KeyContext, Keymap, Pixels, Point, Render, Subscription,
- TestAppContext, UTF16Selection, Window,
+ ActionRegistry, App, Bounds, Context, DispatchTree, FocusHandle, InputHandler, IntoElement,
+ KeyBinding, KeyContext, Keymap, Pixels, Point, Render, Subscription, TestAppContext,
+ UTF16Selection, Unbind, Window,
};
- #[derive(PartialEq, Eq)]
- struct TestAction;
+ actions!(dispatch_test, [TestAction, SecondaryTestAction]);
- impl Action for TestAction {
- fn name(&self) -> &'static str {
- "test::TestAction"
- }
-
- fn name_for_type() -> &'static str
- where
- Self: ::std::marker::Sized,
- {
- "test::TestAction"
- }
-
- fn partial_eq(&self, action: &dyn Action) -> bool {
- action.as_any().downcast_ref::<Self>() == Some(self)
- }
-
- fn boxed_clone(&self) -> std::boxed::Box<dyn Action> {
- Box::new(TestAction)
- }
+ fn test_dispatch_tree(bindings: Vec<KeyBinding>) -> DispatchTree {
+ let registry = ActionRegistry::default();
- fn build(_value: serde_json::Value) -> anyhow::Result<Box<dyn Action>>
- where
- Self: Sized,
- {
- Ok(Box::new(TestAction))
- }
+ DispatchTree::new(
+ Rc::new(RefCell::new(Keymap::new(bindings))),
+ Rc::new(registry),
+ )
}
#[test]
fn test_keybinding_for_action_bounds() {
- let keymap = Keymap::new(vec![KeyBinding::new(
+ let tree = test_dispatch_tree(vec![KeyBinding::new(
"cmd-n",
TestAction,
Some("ProjectPanel"),
)]);
- let mut registry = ActionRegistry::default();
+ let contexts = vec![
+ KeyContext::parse("Workspace").unwrap(),
+ KeyContext::parse("ProjectPanel").unwrap(),
+ ];
+
+ let keybinding = tree.bindings_for_action(&TestAction, &contexts);
+
+ assert!(keybinding[0].action.partial_eq(&TestAction))
+ }
+
+ #[test]
+ fn test_bindings_for_action_hides_targeted_unbind_in_active_context() {
+ let tree = test_dispatch_tree(vec![
+ KeyBinding::new("tab", TestAction, Some("Editor")),
+ KeyBinding::new(
+ "tab",
+ Unbind("dispatch_test::TestAction".into()),
+ Some("Editor && edit_prediction"),
+ ),
+ KeyBinding::new(
+ "tab",
+ SecondaryTestAction,
+ Some("Editor && showing_completions"),
+ ),
+ ]);
+
+ let contexts = vec![
+ KeyContext::parse("Workspace").unwrap(),
+ KeyContext::parse("Editor showing_completions edit_prediction").unwrap(),
+ ];
- registry.load_action::<TestAction>();
+ let bindings = tree.bindings_for_action(&TestAction, &contexts);
+ assert!(bindings.is_empty());
- let keymap = Rc::new(RefCell::new(keymap));
+ let highest = tree.highest_precedence_binding_for_action(&TestAction, &contexts);
+ assert!(highest.is_none());
+
+ let fallback_bindings = tree.bindings_for_action(&SecondaryTestAction, &contexts);
+ assert_eq!(fallback_bindings.len(), 1);
+ assert!(fallback_bindings[0].action.partial_eq(&SecondaryTestAction));
+ }
- let tree = DispatchTree::new(keymap, Rc::new(registry));
+ #[test]
+ fn test_bindings_for_action_keeps_targeted_binding_outside_unbind_context() {
+ let tree = test_dispatch_tree(vec![
+ KeyBinding::new("tab", TestAction, Some("Editor")),
+ KeyBinding::new(
+ "tab",
+ Unbind("dispatch_test::TestAction".into()),
+ Some("Editor && edit_prediction"),
+ ),
+ KeyBinding::new(
+ "tab",
+ SecondaryTestAction,
+ Some("Editor && showing_completions"),
+ ),
+ ]);
let contexts = vec![
KeyContext::parse("Workspace").unwrap(),
- KeyContext::parse("ProjectPanel").unwrap(),
+ KeyContext::parse("Editor").unwrap(),
];
- let keybinding = tree.bindings_for_action(&TestAction, &contexts);
+ let bindings = tree.bindings_for_action(&TestAction, &contexts);
+ assert_eq!(bindings.len(), 1);
+ assert!(bindings[0].action.partial_eq(&TestAction));
- assert!(keybinding[0].action.partial_eq(&TestAction))
+ let highest = tree.highest_precedence_binding_for_action(&TestAction, &contexts);
+ assert!(highest.is_some_and(|binding| binding.action.partial_eq(&TestAction)));
}
#[test]
@@ -698,10 +731,7 @@ mod tests {
KeyBinding::new("space", TestAction, Some("ContextA")),
KeyBinding::new("space f g", TestAction, Some("ContextB")),
];
- let keymap = Rc::new(RefCell::new(Keymap::new(bindings)));
- let mut registry = ActionRegistry::default();
- registry.load_action::<TestAction>();
- let mut tree = DispatchTree::new(keymap, Rc::new(registry));
+ let mut tree = test_dispatch_tree(bindings);
type DispatchPath = SmallVec<[super::DispatchNodeId; 32]>;
fn dispatch(
@@ -4,7 +4,7 @@ mod context;
pub use binding::*;
pub use context::*;
-use crate::{Action, AsKeystroke, Keystroke, is_no_action};
+use crate::{Action, AsKeystroke, Keystroke, Unbind, is_no_action, is_unbind};
use collections::{HashMap, HashSet};
use smallvec::SmallVec;
use std::any::TypeId;
@@ -19,7 +19,7 @@ pub struct KeymapVersion(usize);
pub struct Keymap {
bindings: Vec<KeyBinding>,
binding_indices_by_action_id: HashMap<TypeId, SmallVec<[usize; 3]>>,
- no_action_binding_indices: Vec<usize>,
+ disabled_binding_indices: Vec<usize>,
version: KeymapVersion,
}
@@ -27,6 +27,26 @@ pub struct Keymap {
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
pub struct BindingIndex(usize);
+fn disabled_binding_matches_context(disabled_binding: &KeyBinding, binding: &KeyBinding) -> bool {
+ match (
+ &disabled_binding.context_predicate,
+ &binding.context_predicate,
+ ) {
+ (None, _) => true,
+ (Some(_), None) => false,
+ (Some(disabled_predicate), Some(predicate)) => disabled_predicate.is_superset(predicate),
+ }
+}
+
+fn binding_is_unbound(disabled_binding: &KeyBinding, binding: &KeyBinding) -> bool {
+ disabled_binding.keystrokes == binding.keystrokes
+ && disabled_binding
+ .action()
+ .as_any()
+ .downcast_ref::<Unbind>()
+ .is_some_and(|unbind| unbind.0.as_ref() == binding.action.name())
+}
+
impl Keymap {
/// Create a new keymap with the given bindings.
pub fn new(bindings: Vec<KeyBinding>) -> Self {
@@ -44,8 +64,8 @@ impl Keymap {
pub fn add_bindings<T: IntoIterator<Item = KeyBinding>>(&mut self, bindings: T) {
for binding in bindings {
let action_id = binding.action().as_any().type_id();
- if is_no_action(&*binding.action) {
- self.no_action_binding_indices.push(self.bindings.len());
+ if is_no_action(&*binding.action) || is_unbind(&*binding.action) {
+ self.disabled_binding_indices.push(self.bindings.len());
} else {
self.binding_indices_by_action_id
.entry(action_id)
@@ -62,7 +82,7 @@ impl Keymap {
pub fn clear(&mut self) {
self.bindings.clear();
self.binding_indices_by_action_id.clear();
- self.no_action_binding_indices.clear();
+ self.disabled_binding_indices.clear();
self.version.0 += 1;
}
@@ -90,21 +110,22 @@ impl Keymap {
return None;
}
- for null_ix in &self.no_action_binding_indices {
- if null_ix > ix {
- let null_binding = &self.bindings[*null_ix];
- if null_binding.keystrokes == binding.keystrokes {
- let null_binding_matches =
- match (&null_binding.context_predicate, &binding.context_predicate) {
- (None, _) => true,
- (Some(_), None) => false,
- (Some(null_predicate), Some(predicate)) => {
- null_predicate.is_superset(predicate)
- }
- };
- if null_binding_matches {
+ for disabled_ix in &self.disabled_binding_indices {
+ if disabled_ix > ix {
+ let disabled_binding = &self.bindings[*disabled_ix];
+ if disabled_binding.keystrokes != binding.keystrokes {
+ continue;
+ }
+
+ if is_no_action(&*disabled_binding.action) {
+ if disabled_binding_matches_context(disabled_binding, binding) {
return None;
}
+ } else if is_unbind(&*disabled_binding.action)
+ && disabled_binding_matches_context(disabled_binding, binding)
+ && binding_is_unbound(disabled_binding, binding)
+ {
+ return None;
}
}
}
@@ -170,6 +191,7 @@ impl Keymap {
let mut bindings: SmallVec<[_; 1]> = SmallVec::new();
let mut first_binding_index = None;
+ let mut unbound_bindings: Vec<&KeyBinding> = Vec::new();
for (_, ix, binding) in matched_bindings {
if is_no_action(&*binding.action) {
@@ -186,6 +208,19 @@ impl Keymap {
// For non-user NoAction bindings, continue searching for user overrides
continue;
}
+
+ if is_unbind(&*binding.action) {
+ unbound_bindings.push(binding);
+ continue;
+ }
+
+ if unbound_bindings
+ .iter()
+ .any(|disabled_binding| binding_is_unbound(disabled_binding, binding))
+ {
+ continue;
+ }
+
bindings.push(binding.clone());
first_binding_index.get_or_insert(ix);
}
@@ -197,7 +232,7 @@ impl Keymap {
{
continue;
}
- if is_no_action(&*binding.action) {
+ if is_no_action(&*binding.action) || is_unbind(&*binding.action) {
pending.remove(&&binding.keystrokes);
continue;
}
@@ -232,7 +267,10 @@ impl Keymap {
match pending {
None => None,
Some(is_pending) => {
- if !is_pending || is_no_action(&*binding.action) {
+ if !is_pending
+ || is_no_action(&*binding.action)
+ || is_unbind(&*binding.action)
+ {
return None;
}
Some((depth, BindingIndex(ix), binding))
@@ -256,7 +294,7 @@ impl Keymap {
mod tests {
use super::*;
use crate as gpui;
- use gpui::NoAction;
+ use gpui::{NoAction, Unbind};
actions!(
test_only,
@@ -720,6 +758,76 @@ mod tests {
}
}
+ #[test]
+ fn test_targeted_unbind_ignores_target_context() {
+ let bindings = [
+ KeyBinding::new("tab", ActionAlpha {}, Some("Editor")),
+ KeyBinding::new("tab", ActionBeta {}, Some("Editor && showing_completions")),
+ KeyBinding::new(
+ "tab",
+ Unbind("test_only::ActionAlpha".into()),
+ Some("Editor && edit_prediction"),
+ ),
+ ];
+
+ let mut keymap = Keymap::default();
+ keymap.add_bindings(bindings);
+
+ let (result, pending) = keymap.bindings_for_input(
+ &[Keystroke::parse("tab").unwrap()],
+ &[KeyContext::parse("Editor showing_completions edit_prediction").unwrap()],
+ );
+
+ assert!(!pending);
+ assert_eq!(result.len(), 1);
+ assert!(result[0].action.partial_eq(&ActionBeta {}));
+ }
+
+ #[test]
+ fn test_bindings_for_action_keeps_binding_for_narrower_targeted_unbind() {
+ let bindings = [
+ KeyBinding::new("tab", ActionAlpha {}, Some("Editor")),
+ KeyBinding::new(
+ "tab",
+ Unbind("test_only::ActionAlpha".into()),
+ Some("Editor && edit_prediction"),
+ ),
+ KeyBinding::new("tab", ActionBeta {}, Some("Editor && showing_completions")),
+ ];
+
+ let mut keymap = Keymap::default();
+ keymap.add_bindings(bindings);
+
+ assert_bindings(&keymap, &ActionAlpha {}, &["tab"]);
+ assert_bindings(&keymap, &ActionBeta {}, &["tab"]);
+
+ #[track_caller]
+ fn assert_bindings(keymap: &Keymap, action: &dyn Action, expected: &[&str]) {
+ let actual = keymap
+ .bindings_for_action(action)
+ .map(|binding| binding.keystrokes[0].inner().unparse())
+ .collect::<Vec<_>>();
+ assert_eq!(actual, expected, "{:?}", action);
+ }
+ }
+
+ #[test]
+ fn test_bindings_for_action_removes_binding_for_broader_targeted_unbind() {
+ let bindings = [
+ KeyBinding::new("tab", ActionAlpha {}, Some("Editor && edit_prediction")),
+ KeyBinding::new(
+ "tab",
+ Unbind("test_only::ActionAlpha".into()),
+ Some("Editor"),
+ ),
+ ];
+
+ let mut keymap = Keymap::default();
+ keymap.add_bindings(bindings);
+
+ assert!(keymap.bindings_for_action(&ActionAlpha {}).next().is_none());
+ }
+
#[test]
fn test_source_precedence_sorting() {
// KeybindSource precedence: User (0) > Vim (1) > Base (2) > Default (3)
@@ -78,6 +78,7 @@ pub use test::{TestDispatcher, TestScreenCaptureSource, TestScreenCaptureStream}
#[cfg(all(target_os = "macos", any(test, feature = "test-support")))]
pub use visual_test::VisualTestPlatform;
+// TODO(jk): return an enum instead of a string
/// Return which compositor we're guessing we'll use.
/// Does not attempt to connect to the given compositor.
#[cfg(any(target_os = "linux", target_os = "freebsd"))]
@@ -7,14 +7,39 @@ pub struct Menu {
/// The items in the menu
pub items: Vec<MenuItem>,
+
+ /// Whether this menu is disabled
+ pub disabled: bool,
}
impl Menu {
+ /// Create a new Menu with the given name
+ pub fn new(name: impl Into<SharedString>) -> Self {
+ Self {
+ name: name.into(),
+ items: vec![],
+ disabled: false,
+ }
+ }
+
+ /// Set items to be in this menu
+ pub fn items(mut self, items: impl IntoIterator<Item = MenuItem>) -> Self {
+ self.items = items.into_iter().collect();
+ self
+ }
+
+ /// Set whether this menu is disabled
+ pub fn disabled(mut self, disabled: bool) -> Self {
+ self.disabled = disabled;
+ self
+ }
+
/// Create an OwnedMenu from this Menu
pub fn owned(self) -> OwnedMenu {
OwnedMenu {
name: self.name.to_string().into(),
items: self.items.into_iter().map(|item| item.owned()).collect(),
+ disabled: self.disabled,
}
}
}
@@ -72,6 +97,9 @@ pub enum MenuItem {
/// Whether this action is checked
checked: bool,
+
+ /// Whether this action is disabled
+ disabled: bool,
},
}
@@ -101,6 +129,7 @@ impl MenuItem {
action: Box::new(action),
os_action: None,
checked: false,
+ disabled: false,
}
}
@@ -115,6 +144,7 @@ impl MenuItem {
action: Box::new(action),
os_action: Some(os_action),
checked: false,
+ disabled: false,
}
}
@@ -128,11 +158,13 @@ impl MenuItem {
action,
os_action,
checked,
+ disabled,
} => OwnedMenuItem::Action {
name: name.into(),
action,
os_action,
checked,
+ disabled,
},
MenuItem::SystemMenu(os_menu) => OwnedMenuItem::SystemMenu(os_menu.owned()),
}
@@ -142,19 +174,49 @@ impl MenuItem {
///
/// Only for [`MenuItem::Action`], otherwise, will be ignored
pub fn checked(mut self, checked: bool) -> Self {
+ match &mut self {
+ MenuItem::Action { checked: old, .. } => {
+ *old = checked;
+ }
+ _ => {}
+ }
+ self
+ }
+
+ /// Returns whether this menu item is checked
+ ///
+ /// Only for [`MenuItem::Action`], otherwise, returns false
+ #[inline]
+ pub fn is_checked(&self) -> bool {
match self {
- MenuItem::Action {
- action,
- os_action,
- name,
- ..
- } => MenuItem::Action {
- name,
- action,
- os_action,
- checked,
- },
- _ => self,
+ MenuItem::Action { checked, .. } => *checked,
+ _ => false,
+ }
+ }
+
+ /// Set whether this menu item is disabled
+ pub fn disabled(mut self, disabled: bool) -> Self {
+ match &mut self {
+ MenuItem::Action { disabled: old, .. } => {
+ *old = disabled;
+ }
+ MenuItem::Submenu(submenu) => {
+ submenu.disabled = disabled;
+ }
+ _ => {}
+ }
+ self
+ }
+
+ /// Returns whether this menu item is disabled
+ ///
+ /// Only for [`MenuItem::Action`] and [`MenuItem::Submenu`], otherwise, returns false
+ #[inline]
+ pub fn is_disabled(&self) -> bool {
+ match self {
+ MenuItem::Action { disabled, .. } => *disabled,
+ MenuItem::Submenu(submenu) => submenu.disabled,
+ _ => false,
}
}
}
@@ -179,6 +241,9 @@ pub struct OwnedMenu {
/// The items in the menu
pub items: Vec<OwnedMenuItem>,
+
+ /// Whether this menu is disabled
+ pub disabled: bool,
}
/// The different kinds of items that can be in a menu
@@ -206,6 +271,9 @@ pub enum OwnedMenuItem {
/// Whether this action is checked
checked: bool,
+
+ /// Whether this action is disabled
+ disabled: bool,
},
}
@@ -219,11 +287,13 @@ impl Clone for OwnedMenuItem {
action,
os_action,
checked,
+ disabled,
} => OwnedMenuItem::Action {
name: name.clone(),
action: action.boxed_clone(),
os_action: *os_action,
checked: *checked,
+ disabled: *disabled,
},
OwnedMenuItem::SystemMenu(os_menu) => OwnedMenuItem::SystemMenu(os_menu.clone()),
}
@@ -287,3 +357,70 @@ pub(crate) fn init_app_menus(platform: &dyn Platform, cx: &App) {
}
}));
}
+
+#[cfg(test)]
+mod tests {
+ use crate::Menu;
+
+ #[test]
+ fn test_menu() {
+ let menu = Menu::new("App")
+ .items(vec![
+ crate::MenuItem::action("Action 1", gpui::NoAction),
+ crate::MenuItem::separator(),
+ ])
+ .disabled(true);
+
+ assert_eq!(menu.name.as_ref(), "App");
+ assert_eq!(menu.items.len(), 2);
+ assert!(menu.disabled);
+ }
+
+ #[test]
+ fn test_menu_item_builder() {
+ use super::MenuItem;
+
+ let item = MenuItem::action("Test Action", gpui::NoAction);
+ assert_eq!(
+ match &item {
+ MenuItem::Action { name, .. } => name.as_ref(),
+ _ => unreachable!(),
+ },
+ "Test Action"
+ );
+ assert!(matches!(
+ item,
+ MenuItem::Action {
+ checked: false,
+ disabled: false,
+ ..
+ }
+ ));
+
+ assert!(
+ MenuItem::action("Test Action", gpui::NoAction)
+ .checked(true)
+ .is_checked()
+ );
+ assert!(
+ MenuItem::action("Test Action", gpui::NoAction)
+ .disabled(true)
+ .is_disabled()
+ );
+
+ let submenu = MenuItem::submenu(super::Menu {
+ name: "Submenu".into(),
+ items: vec![],
+ disabled: true,
+ });
+ assert_eq!(
+ match &submenu {
+ MenuItem::Submenu(menu) => menu.name.as_ref(),
+ _ => unreachable!(),
+ },
+ "Submenu"
+ );
+ assert!(!submenu.is_checked());
+ assert!(submenu.is_disabled());
+ }
+}
@@ -138,6 +138,42 @@ impl ObjectFit {
}
}
+/// The minimum size of a column or row in a grid layout
+#[derive(
+ Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Default, JsonSchema, Serialize, Deserialize,
+)]
+pub enum TemplateColumnMinSize {
+ /// The column size may be 0
+ #[default]
+ Zero,
+ /// The column size can be determined by the min content
+ MinContent,
+ /// The column size can be determined by the max content
+ MaxContent,
+}
+
+/// A simplified representation of the grid-template-* value
+#[derive(
+ Copy,
+ Clone,
+ Refineable,
+ PartialEq,
+ Eq,
+ PartialOrd,
+ Ord,
+ Debug,
+ Default,
+ JsonSchema,
+ Serialize,
+ Deserialize,
+)]
+pub struct GridTemplate {
+ /// How this template directive should be repeated
+ pub repeat: u16,
+ /// The track size constraint used in the repeat(<count>, minmax(...)) expression
+ pub min_size: TemplateColumnMinSize,
+}
+
/// The CSS styling that can be applied to an element via the `Styled` trait
#[derive(Clone, Refineable, Debug)]
#[refineable(Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
@@ -262,16 +298,12 @@ pub struct Style {
pub opacity: Option<f32>,
/// The grid columns of this element
- /// Equivalent to the Tailwind `grid-cols-<number>`
- pub grid_cols: Option<u16>,
-
- /// The grid columns with min-content minimum sizing.
- /// Unlike grid_cols, it won't shrink to width 0 in AvailableSpace::MinContent constraints.
- pub grid_cols_min_content: Option<u16>,
+ /// Roughly equivalent to the Tailwind `grid-cols-<number>`
+ pub grid_cols: Option<GridTemplate>,
/// The row span of this element
/// Equivalent to the Tailwind `grid-rows-<number>`
- pub grid_rows: Option<u16>,
+ pub grid_rows: Option<GridTemplate>,
/// The grid location of this element
pub grid_location: Option<GridLocation>,
@@ -790,7 +822,6 @@ impl Default for Style {
opacity: None,
grid_rows: None,
grid_cols: None,
- grid_cols_min_content: None,
grid_location: None,
#[cfg(debug_assertions)]
@@ -1,9 +1,9 @@
use crate::{
self as gpui, AbsoluteLength, AlignContent, AlignItems, AlignSelf, BorderStyle, CursorStyle,
DefiniteLength, Display, Fill, FlexDirection, FlexWrap, Font, FontFeatures, FontStyle,
- FontWeight, GridPlacement, Hsla, JustifyContent, Length, SharedString, StrikethroughStyle,
- StyleRefinement, TextAlign, TextOverflow, TextStyleRefinement, UnderlineStyle, WhiteSpace, px,
- relative, rems,
+ FontWeight, GridPlacement, GridTemplate, Hsla, JustifyContent, Length, SharedString,
+ StrikethroughStyle, StyleRefinement, TemplateColumnMinSize, TextAlign, TextOverflow,
+ TextStyleRefinement, UnderlineStyle, WhiteSpace, px, relative, rems,
};
pub use gpui_macros::{
border_style_methods, box_shadow_style_methods, cursor_style_methods, margin_style_methods,
@@ -711,20 +711,38 @@ pub trait Styled: Sized {
/// Sets the grid columns of this element.
fn grid_cols(mut self, cols: u16) -> Self {
- self.style().grid_cols = Some(cols);
+ self.style().grid_cols = Some(GridTemplate {
+ repeat: cols,
+ min_size: TemplateColumnMinSize::Zero,
+ });
self
}
/// Sets the grid columns with min-content minimum sizing.
/// Unlike grid_cols, it won't shrink to width 0 in AvailableSpace::MinContent constraints.
fn grid_cols_min_content(mut self, cols: u16) -> Self {
- self.style().grid_cols_min_content = Some(cols);
+ self.style().grid_cols = Some(GridTemplate {
+ repeat: cols,
+ min_size: TemplateColumnMinSize::MinContent,
+ });
+ self
+ }
+
+ /// Sets the grid columns with max-content maximum sizing for content-based column widths.
+ fn grid_cols_max_content(mut self, cols: u16) -> Self {
+ self.style().grid_cols = Some(GridTemplate {
+ repeat: cols,
+ min_size: TemplateColumnMinSize::MaxContent,
+ });
self
}
/// Sets the grid rows of this element.
fn grid_rows(mut self, rows: u16) -> Self {
- self.style().grid_rows = Some(rows);
+ self.style().grid_rows = Some(GridTemplate {
+ repeat: rows,
+ min_size: TemplateColumnMinSize::Zero,
+ });
self
}
@@ -1,6 +1,6 @@
use crate::{
- AbsoluteLength, App, Bounds, DefiniteLength, Edges, Length, Pixels, Point, Size, Style, Window,
- point, size,
+ AbsoluteLength, App, Bounds, DefiniteLength, Edges, GridTemplate, Length, Pixels, Point, Size,
+ Style, Window, point, size,
};
use collections::{FxHashMap, FxHashSet};
use stacksafe::{StackSafe, stacksafe};
@@ -8,7 +8,7 @@ use std::{fmt::Debug, ops::Range};
use taffy::{
TaffyTree, TraversePartialTree as _,
geometry::{Point as TaffyPoint, Rect as TaffyRect, Size as TaffySize},
- prelude::min_content,
+ prelude::{max_content, min_content},
style::AvailableSpace as TaffyAvailableSpace,
tree::NodeId,
};
@@ -308,19 +308,31 @@ impl ToTaffy<taffy::style::Style> for Style {
}
fn to_grid_repeat<T: taffy::style::CheapCloneStr>(
- unit: &Option<u16>,
+ unit: &Option<GridTemplate>,
) -> Vec<taffy::GridTemplateComponent<T>> {
- // grid-template-columns: repeat(<number>, minmax(0, 1fr));
- unit.map(|count| vec![repeat(count, vec![minmax(length(0.0), fr(1.0))])])
- .unwrap_or_default()
- }
-
- fn to_grid_repeat_min_content<T: taffy::style::CheapCloneStr>(
- unit: &Option<u16>,
- ) -> Vec<taffy::GridTemplateComponent<T>> {
- // grid-template-columns: repeat(<number>, minmax(min-content, 1fr));
- unit.map(|count| vec![repeat(count, vec![minmax(min_content(), fr(1.0))])])
- .unwrap_or_default()
+ unit.map(|template| {
+ match template.min_size {
+ // grid-template-*: repeat(<number>, minmax(0, 1fr));
+ crate::TemplateColumnMinSize::Zero => {
+ vec![repeat(template.repeat, vec![minmax(length(0.0), fr(1.0))])]
+ }
+ // grid-template-*: repeat(<number>, minmax(min-content, 1fr));
+ crate::TemplateColumnMinSize::MinContent => {
+ vec![repeat(
+ template.repeat,
+ vec![minmax(min_content(), fr(1.0))],
+ )]
+ }
+ // grid-template-*: repeat(<number>, minmax(0, max-content))
+ crate::TemplateColumnMinSize::MaxContent => {
+ vec![repeat(
+ template.repeat,
+ vec![minmax(length(0.0), max_content())],
+ )]
+ }
+ }
+ })
+ .unwrap_or_default()
}
taffy::style::Style {
@@ -347,11 +359,7 @@ impl ToTaffy<taffy::style::Style> for Style {
flex_grow: self.flex_grow,
flex_shrink: self.flex_shrink,
grid_template_rows: to_grid_repeat(&self.grid_rows),
- grid_template_columns: if self.grid_cols_min_content.is_some() {
- to_grid_repeat_min_content(&self.grid_cols_min_content)
- } else {
- to_grid_repeat(&self.grid_cols)
- },
+ grid_template_columns: to_grid_repeat(&self.grid_cols),
grid_row: self
.grid_location
.as_ref()
@@ -240,9 +240,9 @@ impl LineWrapper {
matches!(c, '\u{0980}'..='\u{09FF}') ||
// Some other known special characters that should be treated as word characters,
- // e.g. `a-b`, `var_name`, `I'm`, '@mention`, `#hashtag`, `100%`, `3.1415`,
+ // e.g. `a-b`, `var_name`, `I'm`/`won’t`, `@mention`, `#hashtag`, `100%`, `3.1415`,
// `2^3`, `a~b`, `a=1`, `Self::new`, etc.
- matches!(c, '-' | '_' | '.' | '\'' | '$' | '%' | '@' | '#' | '^' | '~' | ',' | '=' | ':') ||
+ matches!(c, '-' | '_' | '.' | '\'' | '’' | '‘' | '$' | '%' | '@' | '#' | '^' | '~' | ',' | '=' | ':') ||
// `⋯` character is special used in Zed, to keep this at the end of the line.
matches!(c, '⋯')
}
@@ -838,6 +838,8 @@ mod tests {
assert_word("a=1");
assert_word("Self::is_word_char");
assert_word("more⋯");
+ assert_word("won’t");
+ assert_word("‘twas");
// Space
assert_not_word("foo bar");
@@ -560,7 +560,8 @@ pub enum WindowControlArea {
pub struct HitboxId(u64);
impl HitboxId {
- /// Checks if the hitbox with this ID is currently hovered. Except when handling
+ /// Checks if the hitbox with this ID is currently hovered. Returns `false` during keyboard
+ /// input modality so that keyboard navigation suppresses hover highlights. Except when handling
/// `ScrollWheelEvent`, this is typically what you want when determining whether to handle mouse
/// events or paint hover styles.
///
@@ -570,6 +571,9 @@ impl HitboxId {
if window.captured_hitbox == Some(self) {
return true;
}
+ if window.last_input_was_keyboard() {
+ return false;
+ }
let hit_test = &window.mouse_hit_test;
for id in hit_test.ids.iter().take(hit_test.hover_hitbox_count) {
if self == *id {
@@ -608,13 +612,15 @@ pub struct Hitbox {
}
impl Hitbox {
- /// Checks if the hitbox is currently hovered. Except when handling `ScrollWheelEvent`, this is
- /// typically what you want when determining whether to handle mouse events or paint hover
- /// styles.
+ /// Checks if the hitbox is currently hovered. Returns `false` during keyboard input modality
+ /// so that keyboard navigation suppresses hover highlights. Except when handling
+ /// `ScrollWheelEvent`, this is typically what you want when determining whether to handle mouse
+ /// events or paint hover styles.
///
/// This can return `false` even when the hitbox contains the mouse, if a hitbox in front of
/// this sets `HitboxBehavior::BlockMouse` (`InteractiveElement::occlude`) or
- /// `HitboxBehavior::BlockMouseExceptScroll` (`InteractiveElement::block_mouse_except_scroll`).
+ /// `HitboxBehavior::BlockMouseExceptScroll` (`InteractiveElement::block_mouse_except_scroll`),
+ /// or if the current input modality is keyboard (see [`Window::last_input_was_keyboard`]).
///
/// Handling of `ScrollWheelEvent` should typically use `should_handle_scroll` instead.
/// Concretely, this is due to use-cases like overlays that cause the elements under to be
@@ -2338,10 +2344,7 @@ impl Window {
#[cfg(any(feature = "inspector", debug_assertions))]
let inspector_element = self.prepaint_inspector(_inspector_width, cx);
- let mut sorted_deferred_draws =
- (0..self.next_frame.deferred_draws.len()).collect::<SmallVec<[_; 8]>>();
- sorted_deferred_draws.sort_by_key(|ix| self.next_frame.deferred_draws[*ix].priority);
- self.prepaint_deferred_draws(&sorted_deferred_draws, cx);
+ self.prepaint_deferred_draws(cx);
let mut prompt_element = None;
let mut active_drag_element = None;
@@ -2370,7 +2373,7 @@ impl Window {
#[cfg(any(feature = "inspector", debug_assertions))]
self.paint_inspector(inspector_element, cx);
- self.paint_deferred_draws(&sorted_deferred_draws, cx);
+ self.paint_deferred_draws(cx);
if let Some(mut prompt_element) = prompt_element {
prompt_element.paint(self, cx);
@@ -2453,25 +2456,40 @@ impl Window {
None
}
- fn prepaint_deferred_draws(&mut self, deferred_draw_indices: &[usize], cx: &mut App) {
+ fn prepaint_deferred_draws(&mut self, cx: &mut App) {
assert_eq!(self.element_id_stack.len(), 0);
- let mut deferred_draws = mem::take(&mut self.next_frame.deferred_draws);
- for deferred_draw_ix in deferred_draw_indices {
- let deferred_draw = &mut deferred_draws[*deferred_draw_ix];
- self.element_id_stack
- .clone_from(&deferred_draw.element_id_stack);
- self.text_style_stack
- .clone_from(&deferred_draw.text_style_stack);
- self.next_frame
- .dispatch_tree
- .set_active_node(deferred_draw.parent_node);
+ let mut completed_draws = Vec::new();
+
+ // Process deferred draws in multiple rounds to support nesting.
+ // Each round processes all current deferred draws, which may produce new ones.
+ let mut depth = 0;
+ loop {
+ // Limit maximum nesting depth to prevent infinite loops.
+ assert!(depth < 10, "Exceeded maximum (10) deferred depth");
+ depth += 1;
+ let deferred_count = self.next_frame.deferred_draws.len();
+ if deferred_count == 0 {
+ break;
+ }
- let prepaint_start = self.prepaint_index();
- let content_mask = deferred_draw.content_mask.clone();
- if let Some(element) = deferred_draw.element.as_mut() {
- self.with_rendered_view(deferred_draw.current_view, |window| {
- window.with_content_mask(content_mask, |window| {
+ // Sort by priority for this round
+ let traversal_order = self.deferred_draw_traversal_order();
+ let mut deferred_draws = mem::take(&mut self.next_frame.deferred_draws);
+
+ for deferred_draw_ix in traversal_order {
+ let deferred_draw = &mut deferred_draws[deferred_draw_ix];
+ self.element_id_stack
+ .clone_from(&deferred_draw.element_id_stack);
+ self.text_style_stack
+ .clone_from(&deferred_draw.text_style_stack);
+ self.next_frame
+ .dispatch_tree
+ .set_active_node(deferred_draw.parent_node);
+
+ let prepaint_start = self.prepaint_index();
+ if let Some(element) = deferred_draw.element.as_mut() {
+ self.with_rendered_view(deferred_draw.current_view, |window| {
window.with_rem_size(Some(deferred_draw.rem_size), |window| {
window.with_absolute_element_offset(
deferred_draw.absolute_offset,
@@ -2480,30 +2498,38 @@ impl Window {
},
);
});
- });
- })
- } else {
- self.reuse_prepaint(deferred_draw.prepaint_range.clone());
+ })
+ } else {
+ self.reuse_prepaint(deferred_draw.prepaint_range.clone());
+ }
+ let prepaint_end = self.prepaint_index();
+ deferred_draw.prepaint_range = prepaint_start..prepaint_end;
}
- let prepaint_end = self.prepaint_index();
- deferred_draw.prepaint_range = prepaint_start..prepaint_end;
+
+ // Save completed draws and continue with newly added ones
+ completed_draws.append(&mut deferred_draws);
+
+ self.element_id_stack.clear();
+ self.text_style_stack.clear();
}
- assert_eq!(
- self.next_frame.deferred_draws.len(),
- 0,
- "cannot call defer_draw during deferred drawing"
- );
- self.next_frame.deferred_draws = deferred_draws;
- self.element_id_stack.clear();
- self.text_style_stack.clear();
+
+ // Restore all completed draws
+ self.next_frame.deferred_draws = completed_draws;
}
- fn paint_deferred_draws(&mut self, deferred_draw_indices: &[usize], cx: &mut App) {
+ fn paint_deferred_draws(&mut self, cx: &mut App) {
assert_eq!(self.element_id_stack.len(), 0);
+ // Paint all deferred draws in priority order.
+ // Since prepaint has already processed nested deferreds, we just paint them all.
+ if self.next_frame.deferred_draws.len() == 0 {
+ return;
+ }
+
+ let traversal_order = self.deferred_draw_traversal_order();
let mut deferred_draws = mem::take(&mut self.next_frame.deferred_draws);
- for deferred_draw_ix in deferred_draw_indices {
- let mut deferred_draw = &mut deferred_draws[*deferred_draw_ix];
+ for deferred_draw_ix in traversal_order {
+ let mut deferred_draw = &mut deferred_draws[deferred_draw_ix];
self.element_id_stack
.clone_from(&deferred_draw.element_id_stack);
self.next_frame
@@ -2530,6 +2556,13 @@ impl Window {
self.element_id_stack.clear();
}
+ fn deferred_draw_traversal_order(&mut self) -> SmallVec<[usize; 8]> {
+ let deferred_count = self.next_frame.deferred_draws.len();
+ let mut sorted_indices = (0..deferred_count).collect::<SmallVec<[_; 8]>>();
+ sorted_indices.sort_by_key(|ix| self.next_frame.deferred_draws[*ix].priority);
+ sorted_indices
+ }
+
pub(crate) fn prepaint_index(&self) -> PrepaintStateIndex {
PrepaintStateIndex {
hitboxes_index: self.next_frame.hitboxes.len(),
@@ -4028,14 +4061,18 @@ impl Window {
/// Dispatch a mouse or keyboard event on the window.
#[profiling::function]
pub fn dispatch_event(&mut self, event: PlatformInput, cx: &mut App) -> DispatchEventResult {
- // Track whether this input was keyboard-based for focus-visible styling
+ // Track input modality for focus-visible styling and hover suppression.
+ // Hover is suppressed during keyboard modality so that keyboard navigation
+ // doesn't show hover highlights on the item under the mouse cursor.
+ let old_modality = self.last_input_modality;
self.last_input_modality = match &event {
- PlatformInput::KeyDown(_) | PlatformInput::ModifiersChanged(_) => {
- InputModality::Keyboard
- }
- PlatformInput::MouseDown(e) if e.is_focusing() => InputModality::Mouse,
+ PlatformInput::KeyDown(_) => InputModality::Keyboard,
+ PlatformInput::MouseMove(_) | PlatformInput::MouseDown(_) => InputModality::Mouse,
_ => self.last_input_modality,
};
+ if self.last_input_modality != old_modality {
+ self.refresh();
+ }
// Handlers may set this to false by calling `stop_propagation`.
cx.propagate_event = true;
@@ -57,7 +57,7 @@ pub(crate) trait LinuxClient {
#[cfg(feature = "screen-capture")]
fn is_screen_capture_supported(&self) -> bool {
- false
+ true
}
#[cfg(feature = "screen-capture")]
@@ -633,28 +633,42 @@ pub(super) fn open_uri_internal(
if let Some(uri) = ashpd::Uri::parse(uri).log_err() {
executor
.spawn(async move {
- match ashpd::desktop::open_uri::OpenFileRequest::default()
- .activation_token(activation_token.clone().map(ashpd::ActivationToken::from))
- .send_uri(&uri)
- .await
- .and_then(|e| e.response())
- {
- Ok(()) => return,
- Err(e) => log::error!("Failed to open with dbus: {}", e),
- }
-
+ let mut xdg_open_failed = false;
for mut command in open::commands(uri.to_string()) {
if let Some(token) = activation_token.as_ref() {
command.env("XDG_ACTIVATION_TOKEN", token);
}
let program = format!("{:?}", command.get_program());
match smol::process::Command::from(command).spawn() {
- Ok(mut cmd) => {
- cmd.status().await.log_err();
- return;
+ Ok(mut cmd) => match cmd.status().await {
+ Ok(status) if status.success() => return,
+ Ok(status) => {
+ log::error!("Command {} exited with status: {}", program, status);
+ xdg_open_failed = true;
+ }
+ Err(e) => {
+ log::error!("Failed to get status from {}: {}", program, e);
+ xdg_open_failed = true;
+ }
+ },
+ Err(e) => {
+ log::error!("Failed to open with {}: {}", program, e);
+ xdg_open_failed = true;
}
+ }
+ }
+
+ if xdg_open_failed {
+ match ashpd::desktop::open_uri::OpenFileRequest::default()
+ .activation_token(activation_token.map(ashpd::ActivationToken::from))
+ .send_uri(&uri)
+ .await
+ .and_then(|e| e.response())
+ {
+ Ok(()) => {}
+ Err(ashpd::Error::Response(ashpd::desktop::ResponseError::Cancelled)) => {}
Err(e) => {
- log::error!("Failed to open with {}: {}", program, e)
+ log::error!("Failed to open with dbus: {}", e);
}
}
}
@@ -702,11 +702,6 @@ impl LinuxClient for WaylandClient {
None
}
- #[cfg(feature = "screen-capture")]
- fn is_screen_capture_supported(&self) -> bool {
- false
- }
-
#[cfg(feature = "screen-capture")]
fn screen_capture_sources(
&self,
@@ -52,6 +52,7 @@ pub(crate) struct Callbacks {
appearance_changed: Option<Box<dyn FnMut()>>,
}
+#[derive(Debug, Clone, Copy)]
struct RawWindow {
window: *mut c_void,
display: *mut c_void,
@@ -600,6 +601,7 @@ impl WaylandWindowStatePtr {
state.tiling = configure.tiling;
// Limit interactive resizes to once per vblank
if configure.resizing && state.resize_throttle {
+ state.surface_state.ack_configure(serial);
return;
} else if configure.resizing {
state.resize_throttle = true;
@@ -1347,23 +1349,13 @@ impl PlatformWindow for WaylandWindow {
.display_ptr()
.cast::<std::ffi::c_void>(),
};
- let display_handle = rwh::HasDisplayHandle::display_handle(&raw_window)
- .unwrap()
- .as_raw();
- let window_handle = rwh::HasWindowHandle::window_handle(&raw_window)
- .unwrap()
- .as_raw();
-
- state
- .renderer
- .recover(display_handle, window_handle)
- .unwrap_or_else(|err| {
- panic!(
- "GPU device lost and recovery failed. \
+ state.renderer.recover(&raw_window).unwrap_or_else(|err| {
+ panic!(
+ "GPU device lost and recovery failed. \
This may happen after system suspend/resume. \
Please restart the application.\n\nError: {err}"
- )
- });
+ )
+ });
// The current scene references atlas textures that were cleared during recovery.
// Skip this frame and let the next frame rebuild the scene with fresh textures.
@@ -225,6 +225,7 @@ fn find_visuals(xcb: &XCBConnection, screen_index: usize) -> VisualSet {
set
}
+#[derive(Debug, Clone, Copy)]
struct RawWindow {
connection: *mut c_void,
screen_id: usize,
@@ -533,7 +534,7 @@ impl X11WindowState {
&& let Some(title) = titlebar.title
{
check_reply(
- || "X11 ChangeProperty8 on window title failed.",
+ || "X11 ChangeProperty8 on WM_NAME failed.",
xcb.change_property8(
xproto::PropMode::REPLACE,
x_window,
@@ -542,6 +543,16 @@ impl X11WindowState {
title.as_bytes(),
),
)?;
+ check_reply(
+ || "X11 ChangeProperty8 on _NET_WM_NAME failed.",
+ xcb.change_property8(
+ xproto::PropMode::REPLACE,
+ x_window,
+ atoms._NET_WM_NAME,
+ atoms.UTF8_STRING,
+ title.as_bytes(),
+ ),
+ )?;
}
if params.kind == WindowKind::PopUp {
@@ -1603,23 +1614,13 @@ impl PlatformWindow for X11Window {
window_id: self.0.x_window,
visual_id: inner.visual_id,
};
- let display_handle = rwh::HasDisplayHandle::display_handle(&raw_window)
- .unwrap()
- .as_raw();
- let window_handle = rwh::HasWindowHandle::window_handle(&raw_window)
- .unwrap()
- .as_raw();
-
- inner
- .renderer
- .recover(display_handle, window_handle)
- .unwrap_or_else(|err| {
- panic!(
- "GPU device lost and recovery failed. \
+ inner.renderer.recover(&raw_window).unwrap_or_else(|err| {
+ panic!(
+ "GPU device lost and recovery failed. \
This may happen after system suspend/resume. \
Please restart the application.\n\nError: {err}"
- )
- });
+ )
+ });
// The current scene references atlas textures that were cleared during recovery.
// Skip this frame and let the next frame rebuild the scene with fresh textures.
@@ -1,16 +1,23 @@
use core::slice;
-use std::ffi::c_void;
+use std::ffi::{CStr, c_void};
+use std::path::PathBuf;
use cocoa::{
- appkit::{NSPasteboard, NSPasteboardTypePNG, NSPasteboardTypeString, NSPasteboardTypeTIFF},
+ appkit::{
+ NSFilenamesPboardType, NSPasteboard, NSPasteboardTypePNG, NSPasteboardTypeString,
+ NSPasteboardTypeTIFF,
+ },
base::{id, nil},
- foundation::NSData,
+ foundation::{NSArray, NSData, NSFastEnumeration, NSString},
};
use objc::{msg_send, runtime::Object, sel, sel_impl};
+use smallvec::SmallVec;
use strum::IntoEnumIterator as _;
use crate::ns_string;
-use gpui::{ClipboardEntry, ClipboardItem, ClipboardString, Image, ImageFormat, hash};
+use gpui::{
+ ClipboardEntry, ClipboardItem, ClipboardString, ExternalPaths, Image, ImageFormat, hash,
+};
pub struct Pasteboard {
inner: id,
@@ -41,28 +48,37 @@ impl Pasteboard {
}
pub fn read(&self) -> Option<ClipboardItem> {
- // First, see if it's a string.
unsafe {
- let pasteboard_types: id = self.inner.types();
- let string_type: id = ns_string("public.utf8-plain-text");
+ // Check for file paths first
+ let filenames = NSPasteboard::propertyListForType(self.inner, NSFilenamesPboardType);
+ if filenames != nil && NSArray::count(filenames) > 0 {
+ let mut paths = SmallVec::new();
+ for file in filenames.iter() {
+ let f = NSString::UTF8String(file);
+ let path = CStr::from_ptr(f).to_string_lossy().into_owned();
+ paths.push(PathBuf::from(path));
+ }
+ if !paths.is_empty() {
+ let mut entries = vec![ClipboardEntry::ExternalPaths(ExternalPaths(paths))];
+
+ // Also include the string representation so text editors can
+ // paste the path as text.
+ if let Some(string_item) = self.read_string_from_pasteboard() {
+ entries.push(string_item);
+ }
- if msg_send![pasteboard_types, containsObject: string_type] {
- let data = self.inner.dataForType(string_type);
- if data == nil {
- return None;
- } else if data.bytes().is_null() {
- // https://developer.apple.com/documentation/foundation/nsdata/1410616-bytes?language=objc
- // "If the length of the NSData object is 0, this property returns nil."
- return Some(self.read_string(&[]));
- } else {
- let bytes =
- slice::from_raw_parts(data.bytes() as *mut u8, data.length() as usize);
-
- return Some(self.read_string(bytes));
+ return Some(ClipboardItem { entries });
}
}
- // If it wasn't a string, try the various supported image types.
+ // Next, check for a plain string.
+ if let Some(string_entry) = self.read_string_from_pasteboard() {
+ return Some(ClipboardItem {
+ entries: vec![string_entry],
+ });
+ }
+
+ // Finally, try the various supported image types.
for format in ImageFormat::iter() {
if let Some(item) = self.read_image(format) {
return Some(item);
@@ -70,7 +86,6 @@ impl Pasteboard {
}
}
- // If it wasn't a string or a supported image type, give up.
None
}
@@ -94,8 +109,26 @@ impl Pasteboard {
}
}
- fn read_string(&self, text_bytes: &[u8]) -> ClipboardItem {
+ unsafe fn read_string_from_pasteboard(&self) -> Option<ClipboardEntry> {
unsafe {
+ let pasteboard_types: id = self.inner.types();
+ let string_type: id = ns_string("public.utf8-plain-text");
+
+ if !msg_send![pasteboard_types, containsObject: string_type] {
+ return None;
+ }
+
+ let data = self.inner.dataForType(string_type);
+ let text_bytes: &[u8] = if data == nil {
+ return None;
+ } else if data.bytes().is_null() {
+ // https://developer.apple.com/documentation/foundation/nsdata/1410616-bytes?language=objc
+ // "If the length of the NSData object is 0, this property returns nil."
+ &[]
+ } else {
+ slice::from_raw_parts(data.bytes() as *mut u8, data.length() as usize)
+ };
+
let text = String::from_utf8_lossy(text_bytes).to_string();
let metadata = self
.data_for_type(self.text_hash_type)
@@ -111,9 +144,7 @@ impl Pasteboard {
}
});
- ClipboardItem {
- entries: vec![ClipboardEntry::String(ClipboardString { text, metadata })],
- }
+ Some(ClipboardEntry::String(ClipboardString { text, metadata }))
}
}
@@ -300,12 +331,44 @@ impl UTType {
#[cfg(test)]
mod tests {
- use cocoa::{appkit::NSPasteboardTypeString, foundation::NSData};
+ use cocoa::{
+ appkit::{NSFilenamesPboardType, NSPasteboard, NSPasteboardTypeString},
+ base::{id, nil},
+ foundation::{NSArray, NSData},
+ };
+ use std::ffi::c_void;
- use gpui::{ClipboardEntry, ClipboardItem, ClipboardString};
+ use gpui::{ClipboardEntry, ClipboardItem, ClipboardString, ImageFormat};
use super::*;
+ unsafe fn simulate_external_file_copy(pasteboard: &Pasteboard, paths: &[&str]) {
+ unsafe {
+ let ns_paths: Vec<id> = paths.iter().map(|p| ns_string(p)).collect();
+ let ns_array = NSArray::arrayWithObjects(nil, &ns_paths);
+
+ let mut types = vec![NSFilenamesPboardType];
+ types.push(NSPasteboardTypeString);
+
+ let types_array = NSArray::arrayWithObjects(nil, &types);
+ pasteboard.inner.declareTypes_owner(types_array, nil);
+
+ pasteboard
+ .inner
+ .setPropertyList_forType(ns_array, NSFilenamesPboardType);
+
+ let joined = paths.join("\n");
+ let bytes = NSData::dataWithBytes_length_(
+ nil,
+ joined.as_ptr() as *const c_void,
+ joined.len() as u64,
+ );
+ pasteboard
+ .inner
+ .setData_forType(bytes, NSPasteboardTypeString);
+ }
+ }
+
#[test]
fn test_string() {
let pasteboard = Pasteboard::unique();
@@ -339,4 +402,124 @@ mod tests {
Some(ClipboardItem::new_string(text_from_other_app.to_string()))
);
}
+
+ #[test]
+ fn test_read_external_path() {
+ let pasteboard = Pasteboard::unique();
+
+ unsafe {
+ simulate_external_file_copy(&pasteboard, &["/test.txt"]);
+ }
+
+ let item = pasteboard.read().expect("should read clipboard item");
+
+ // Test both ExternalPaths and String entries exist
+ assert_eq!(item.entries.len(), 2);
+
+ // Test first entry is ExternalPaths
+ match &item.entries[0] {
+ ClipboardEntry::ExternalPaths(ep) => {
+ assert_eq!(ep.paths(), &[PathBuf::from("/test.txt")]);
+ }
+ other => panic!("expected ExternalPaths, got {:?}", other),
+ }
+
+ // Test second entry is String
+ match &item.entries[1] {
+ ClipboardEntry::String(s) => {
+ assert_eq!(s.text(), "/test.txt");
+ }
+ other => panic!("expected String, got {:?}", other),
+ }
+ }
+
+ #[test]
+ fn test_read_external_paths_with_spaces() {
+ let pasteboard = Pasteboard::unique();
+ let paths = ["/some file with spaces.txt"];
+
+ unsafe {
+ simulate_external_file_copy(&pasteboard, &paths);
+ }
+
+ let item = pasteboard.read().expect("should read clipboard item");
+
+ match &item.entries[0] {
+ ClipboardEntry::ExternalPaths(ep) => {
+ assert_eq!(ep.paths(), &[PathBuf::from("/some file with spaces.txt")]);
+ }
+ other => panic!("expected ExternalPaths, got {:?}", other),
+ }
+ }
+
+ #[test]
+ fn test_read_multiple_external_paths() {
+ let pasteboard = Pasteboard::unique();
+ let paths = ["/file.txt", "/image.png"];
+
+ unsafe {
+ simulate_external_file_copy(&pasteboard, &paths);
+ }
+
+ let item = pasteboard.read().expect("should read clipboard item");
+ assert_eq!(item.entries.len(), 2);
+
+ // Test both ExternalPaths and String entries exist
+ match &item.entries[0] {
+ ClipboardEntry::ExternalPaths(ep) => {
+ assert_eq!(
+ ep.paths(),
+ &[PathBuf::from("/file.txt"), PathBuf::from("/image.png"),]
+ );
+ }
+ other => panic!("expected ExternalPaths, got {:?}", other),
+ }
+
+ match &item.entries[1] {
+ ClipboardEntry::String(s) => {
+ assert_eq!(s.text(), "/file.txt\n/image.png");
+ assert_eq!(s.metadata, None);
+ }
+ other => panic!("expected String, got {:?}", other),
+ }
+ }
+
+ #[test]
+ fn test_read_image() {
+ let pasteboard = Pasteboard::unique();
+
+        // A minimal valid PNG: a 1x1 transparent pixel.
+ let png_bytes: &[u8] = &[
+ 0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A, 0x00, 0x00, 0x00, 0x0D, 0x49, 0x48,
+ 0x44, 0x52, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x08, 0x06, 0x00, 0x00,
+ 0x00, 0x1F, 0x15, 0xC4, 0x89, 0x00, 0x00, 0x00, 0x0A, 0x49, 0x44, 0x41, 0x54, 0x78,
+ 0x9C, 0x62, 0x00, 0x00, 0x00, 0x02, 0x00, 0x01, 0xE5, 0x27, 0xDE, 0xFC, 0x00, 0x00,
+ 0x00, 0x00, 0x49, 0x45, 0x4E, 0x44, 0xAE, 0x42, 0x60, 0x82,
+ ];
+
+ unsafe {
+ let ns_png_type = NSPasteboardTypePNG;
+ let types_array = NSArray::arrayWithObjects(nil, &[ns_png_type]);
+ pasteboard.inner.declareTypes_owner(types_array, nil);
+
+ let data = NSData::dataWithBytes_length_(
+ nil,
+ png_bytes.as_ptr() as *const c_void,
+ png_bytes.len() as u64,
+ );
+ pasteboard.inner.setData_forType(data, ns_png_type);
+ }
+
+ let item = pasteboard.read().expect("should read PNG image");
+
+ // Test Image entry exists
+ assert_eq!(item.entries.len(), 1);
+ match &item.entries[0] {
+ ClipboardEntry::Image(img) => {
+ assert_eq!(img.format, ImageFormat::Png);
+ assert_eq!(img.bytes, png_bytes);
+ }
+ other => panic!("expected Image, got {:?}", other),
+ }
+ }
}
@@ -7,8 +7,8 @@ use block::ConcreteBlock;
use cocoa::{
appkit::{
NSApplication, NSApplicationActivationPolicy::NSApplicationActivationPolicyRegular,
- NSEventModifierFlags, NSMenu, NSMenuItem, NSModalResponse, NSOpenPanel, NSSavePanel,
- NSVisualEffectState, NSVisualEffectView, NSWindow,
+ NSControl as _, NSEventModifierFlags, NSMenu, NSMenuItem, NSModalResponse, NSOpenPanel,
+ NSSavePanel, NSVisualEffectState, NSVisualEffectView, NSWindow,
},
base::{BOOL, NO, YES, id, nil, selector},
foundation::{
@@ -297,6 +297,7 @@ impl MacPlatform {
action,
os_action,
checked,
+ disabled,
} => {
// Note that this is intentionally using earlier bindings, whereas typically
// later ones take display precedence. See the discussion on
@@ -394,13 +395,18 @@ impl MacPlatform {
if *checked {
item.setState_(NSVisualEffectState::Active);
}
+ item.setEnabled_(if *disabled { NO } else { YES });
let tag = actions.len() as NSInteger;
let _: () = msg_send![item, setTag: tag];
actions.push(action.boxed_clone());
item
}
- MenuItem::Submenu(Menu { name, items }) => {
+ MenuItem::Submenu(Menu {
+ name,
+ items,
+ disabled,
+ }) => {
let item = NSMenuItem::new(nil).autorelease();
let submenu = NSMenu::new(nil).autorelease();
submenu.setDelegate_(delegate);
@@ -408,6 +414,7 @@ impl MacPlatform {
submenu.addItem_(Self::create_menu_item(item, delegate, actions, keymap));
}
item.setSubmenu_(submenu);
+ item.setEnabled_(if *disabled { NO } else { YES });
item.setTitle_(ns_string(name));
item
}
@@ -361,13 +361,22 @@ impl MacTextSystemState {
fn raster_bounds(&self, params: &RenderGlyphParams) -> Result<Bounds<DevicePixels>> {
let font = &self.fonts[params.font_id.0];
let scale = Transform2F::from_scale(params.scale_factor);
- Ok(bounds_from_rect_i(font.raster_bounds(
+ let mut bounds: Bounds<DevicePixels> = bounds_from_rect_i(font.raster_bounds(
params.glyph_id.0,
params.font_size.into(),
scale,
HintingOptions::None,
font_kit::canvas::RasterizationOptions::GrayscaleAa,
- )?))
+ )?);
+
+        // Widen the bounds leftwards by 3% of the font size (clamped to 1–5 px)
+        // so anti-aliased pixels that bleed past the left edge aren't clipped.
+ let pad =
+ ((params.font_size.as_f32() * 0.03 * params.scale_factor).ceil() as i32).clamp(1, 5);
+ bounds.origin.x -= DevicePixels(pad);
+ bounds.size.width += DevicePixels(pad);
+
+ Ok(bounds)
}
fn rasterize_glyph(
@@ -78,11 +78,12 @@ impl WgpuContext {
#[cfg(target_family = "wasm")]
pub async fn new_web() -> anyhow::Result<Self> {
- let instance = wgpu::Instance::new(&wgpu::InstanceDescriptor {
+ let instance = wgpu::Instance::new(wgpu::InstanceDescriptor {
backends: wgpu::Backends::BROWSER_WEBGPU | wgpu::Backends::GL,
flags: wgpu::InstanceFlags::default(),
backend_options: wgpu::BackendOptions::default(),
memory_budget_thresholds: wgpu::MemoryBudgetThresholds::default(),
+ display: None,
});
let adapter = instance
@@ -148,12 +149,13 @@ impl WgpuContext {
}
#[cfg(not(target_family = "wasm"))]
- pub fn instance() -> wgpu::Instance {
- wgpu::Instance::new(&wgpu::InstanceDescriptor {
+ pub fn instance(display: Box<dyn wgpu::wgt::WgpuHasDisplayHandle>) -> wgpu::Instance {
+ wgpu::Instance::new(wgpu::InstanceDescriptor {
backends: wgpu::Backends::VULKAN | wgpu::Backends::GL,
flags: wgpu::InstanceFlags::default(),
backend_options: wgpu::BackendOptions::default(),
memory_budget_thresholds: wgpu::MemoryBudgetThresholds::default(),
+ display: Some(display),
})
}
@@ -198,9 +200,8 @@ impl WgpuContext {
//
// 1. ZED_DEVICE_ID match — explicit user override
// 2. Compositor GPU match — the GPU the display server is rendering on
- // 3. Device type — WGPU HighPerformance order (Discrete > Integrated >
- // Other > Virtual > Cpu). "Other" ranks above "Virtual" because
- // backends like OpenGL may report real hardware as "Other".
+ // 3. Device type (Discrete > Integrated > Other > Virtual > Cpu).
+        //    "Other" ranks above "Virtual" because GL backends may report real hardware as "Other".
// 4. Backend — prefer Vulkan/Metal/Dx12 over GL/etc.
adapters.sort_by_key(|adapter| {
let info = adapter.get_info();
@@ -305,10 +306,7 @@ impl WgpuContext {
anyhow::bail!("no compatible alpha modes");
}
- // Create the real device with full features
let (device, queue, dual_source_blending) = Self::create_device(adapter).await?;
-
- // Use an error scope to capture any validation errors during configure
let error_scope = device.push_error_scope(wgpu::ErrorFilter::Validation);
let test_config = wgpu::SurfaceConfiguration {
@@ -324,7 +322,6 @@ impl WgpuContext {
surface.configure(&device, &test_config);
- // Check if there was a validation error
let error = error_scope.pop().await;
if let Some(e) = error {
anyhow::bail!("surface configuration failed: {e}");
@@ -163,21 +163,22 @@ impl WgpuRenderer {
/// The caller must ensure that the window handle remains valid for the lifetime
/// of the returned renderer.
#[cfg(not(target_family = "wasm"))]
- pub fn new<W: HasWindowHandle + HasDisplayHandle>(
+ pub fn new<W>(
gpu_context: GpuContext,
window: &W,
config: WgpuSurfaceConfig,
compositor_gpu: Option<CompositorGpuHint>,
- ) -> anyhow::Result<Self> {
+ ) -> anyhow::Result<Self>
+ where
+ W: HasWindowHandle + HasDisplayHandle + std::fmt::Debug + Send + Sync + Clone + 'static,
+ {
let window_handle = window
.window_handle()
.map_err(|e| anyhow::anyhow!("Failed to get window handle: {e}"))?;
- let display_handle = window
- .display_handle()
- .map_err(|e| anyhow::anyhow!("Failed to get display handle: {e}"))?;
let target = wgpu::SurfaceTargetUnsafe::RawHandle {
- raw_display_handle: display_handle.as_raw(),
+ // Fall back to the display handle already provided via InstanceDescriptor::display.
+ raw_display_handle: None,
raw_window_handle: window_handle.as_raw(),
};
@@ -188,7 +189,7 @@ impl WgpuRenderer {
.borrow()
.as_ref()
.map(|ctx| ctx.instance.clone())
- .unwrap_or_else(WgpuContext::instance);
+ .unwrap_or_else(|| WgpuContext::instance(Box::new(window.clone())));
// Safety: The caller guarantees that the window handle is valid for the
// lifetime of this renderer. In practice, the RawWindow struct is created
@@ -645,7 +646,7 @@ impl WgpuRenderer {
module: &wgpu::ShaderModule| {
let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
label: Some(&format!("{name}_layout")),
- bind_group_layouts: &[globals_layout, data_layout],
+ bind_group_layouts: &[Some(globals_layout), Some(data_layout)],
immediate_size: 0,
});
@@ -1052,10 +1053,19 @@ impl WgpuRenderer {
self.atlas.before_frame();
- let texture_result = self.resources().surface.get_current_texture();
- let frame = match texture_result {
- Ok(frame) => frame,
- Err(wgpu::SurfaceError::Lost | wgpu::SurfaceError::Outdated) => {
+ let frame = match self.resources().surface.get_current_texture() {
+ wgpu::CurrentSurfaceTexture::Success(frame) => frame,
+ wgpu::CurrentSurfaceTexture::Suboptimal(frame) => {
+                // The acquired frame must be dropped before the surface can be reconfigured.
+ drop(frame);
+ let surface_config = self.surface_config.clone();
+ let resources = self.resources_mut();
+ resources
+ .surface
+ .configure(&resources.device, &surface_config);
+ return;
+ }
+ wgpu::CurrentSurfaceTexture::Lost | wgpu::CurrentSurfaceTexture::Outdated => {
let surface_config = self.surface_config.clone();
let resources = self.resources_mut();
resources
@@ -1063,9 +1073,12 @@ impl WgpuRenderer {
.configure(&resources.device, &surface_config);
return;
}
- Err(e) => {
+ wgpu::CurrentSurfaceTexture::Timeout | wgpu::CurrentSurfaceTexture::Occluded => {
+ return;
+ }
+ wgpu::CurrentSurfaceTexture::Validation => {
*self.last_error.lock().unwrap() =
- Some(format!("Failed to acquire surface texture: {e}"));
+ Some("Surface texture validation error".to_string());
return;
}
};
@@ -1609,7 +1622,9 @@ impl WgpuRenderer {
}
pub fn destroy(&mut self) {
- // wgpu resources are automatically cleaned up when dropped
+ // Release surface-bound GPU resources eagerly so the underlying native
+ // window can be destroyed before the renderer itself is dropped.
+ self.resources.take();
}
/// Returns true if the GPU device was lost and recovery is needed.
@@ -1625,11 +1640,10 @@ impl WgpuRenderer {
/// - The first window to call this will recreate the shared context
/// - Subsequent windows will adopt the already-recovered context
#[cfg(not(target_family = "wasm"))]
- pub fn recover(
- &mut self,
- raw_display_handle: raw_window_handle::RawDisplayHandle,
- raw_window_handle: raw_window_handle::RawWindowHandle,
- ) -> anyhow::Result<()> {
+ pub fn recover<W>(&mut self, window: &W) -> anyhow::Result<()>
+ where
+ W: HasWindowHandle + HasDisplayHandle + std::fmt::Debug + Send + Sync + Clone + 'static,
+ {
let gpu_context = self.context.as_ref().expect("recover requires gpu_context");
// Check if another window already recovered the context
@@ -1638,6 +1652,10 @@ impl WgpuRenderer {
.as_ref()
.is_none_or(|ctx| ctx.device_lost());
+ let window_handle = window
+ .window_handle()
+ .map_err(|e| anyhow::anyhow!("Failed to get window handle: {e}"))?;
+
let surface = if needs_new_context {
log::warn!("GPU device lost, recreating context...");
@@ -1648,15 +1666,15 @@ impl WgpuRenderer {
            // Wait for the GPU driver to stabilize (350 ms, matching the delay used by the Windows backend).
std::thread::sleep(std::time::Duration::from_millis(350));
- let instance = WgpuContext::instance();
- let surface = create_surface(&instance, raw_display_handle, raw_window_handle)?;
+ let instance = WgpuContext::instance(Box::new(window.clone()));
+ let surface = create_surface(&instance, window_handle.as_raw())?;
let new_context = WgpuContext::new(instance, &surface, self.compositor_gpu)?;
*gpu_context.borrow_mut() = Some(new_context);
surface
} else {
let ctx_ref = gpu_context.borrow();
let instance = &ctx_ref.as_ref().unwrap().instance;
- create_surface(instance, raw_display_handle, raw_window_handle)?
+ create_surface(instance, window_handle.as_raw())?
};
let config = WgpuSurfaceConfig {
@@ -1691,13 +1709,13 @@ impl WgpuRenderer {
#[cfg(not(target_family = "wasm"))]
fn create_surface(
instance: &wgpu::Instance,
- raw_display_handle: raw_window_handle::RawDisplayHandle,
raw_window_handle: raw_window_handle::RawWindowHandle,
) -> anyhow::Result<wgpu::Surface<'static>> {
unsafe {
instance
.create_surface_unsafe(wgpu::SurfaceTargetUnsafe::RawHandle {
- raw_display_handle,
+ // Fall back to the display handle already provided via InstanceDescriptor::display.
+ raw_display_handle: None,
raw_window_handle,
})
.map_err(|e| anyhow::anyhow!("{e}"))
@@ -8,24 +8,22 @@ use windows::Win32::{
System::{
DataExchange::{
CloseClipboard, CountClipboardFormats, EmptyClipboard, EnumClipboardFormats,
- GetClipboardData, GetClipboardFormatNameW, IsClipboardFormatAvailable, OpenClipboard,
- RegisterClipboardFormatW, SetClipboardData,
+ GetClipboardData, GetClipboardFormatNameW, OpenClipboard, RegisterClipboardFormatW,
+ SetClipboardData,
},
Memory::{GMEM_MOVEABLE, GlobalAlloc, GlobalLock, GlobalSize, GlobalUnlock},
Ole::{CF_DIB, CF_HDROP, CF_UNICODETEXT},
},
UI::Shell::{DragQueryFileW, HDROP},
};
-use windows_core::PCWSTR;
+use windows::core::{Owned, PCWSTR};
use gpui::{
ClipboardEntry, ClipboardItem, ClipboardString, ExternalPaths, Image, ImageFormat, hash,
};
-// https://learn.microsoft.com/en-us/windows/win32/api/shellapi/nf-shellapi-dragqueryfilew
const DRAGDROP_GET_FILES_COUNT: u32 = 0xFFFFFFFF;
-// Clipboard formats
static CLIPBOARD_HASH_FORMAT: LazyLock<u32> =
LazyLock::new(|| register_clipboard_format(windows::core::w!("GPUI internal text hash")));
static CLIPBOARD_METADATA_FORMAT: LazyLock<u32> =
@@ -39,47 +37,94 @@ static CLIPBOARD_PNG_FORMAT: LazyLock<u32> =
static CLIPBOARD_JPG_FORMAT: LazyLock<u32> =
LazyLock::new(|| register_clipboard_format(windows::core::w!("JFIF")));
-// Helper maps and sets
-static FORMATS_MAP: LazyLock<FxHashMap<u32, ClipboardFormatType>> = LazyLock::new(|| {
- let mut formats_map = FxHashMap::default();
- formats_map.insert(CF_UNICODETEXT.0 as u32, ClipboardFormatType::Text);
- formats_map.insert(*CLIPBOARD_PNG_FORMAT, ClipboardFormatType::Image);
- formats_map.insert(*CLIPBOARD_GIF_FORMAT, ClipboardFormatType::Image);
- formats_map.insert(*CLIPBOARD_JPG_FORMAT, ClipboardFormatType::Image);
- formats_map.insert(*CLIPBOARD_SVG_FORMAT, ClipboardFormatType::Image);
- formats_map.insert(CF_DIB.0 as u32, ClipboardFormatType::Image);
- formats_map.insert(CF_HDROP.0 as u32, ClipboardFormatType::Files);
- formats_map
-});
static IMAGE_FORMATS_MAP: LazyLock<FxHashMap<u32, ImageFormat>> = LazyLock::new(|| {
- let mut formats_map = FxHashMap::default();
- formats_map.insert(*CLIPBOARD_PNG_FORMAT, ImageFormat::Png);
- formats_map.insert(*CLIPBOARD_GIF_FORMAT, ImageFormat::Gif);
- formats_map.insert(*CLIPBOARD_JPG_FORMAT, ImageFormat::Jpeg);
- formats_map.insert(*CLIPBOARD_SVG_FORMAT, ImageFormat::Svg);
- formats_map
+ let mut map = FxHashMap::default();
+ map.insert(*CLIPBOARD_PNG_FORMAT, ImageFormat::Png);
+ map.insert(*CLIPBOARD_GIF_FORMAT, ImageFormat::Gif);
+ map.insert(*CLIPBOARD_JPG_FORMAT, ImageFormat::Jpeg);
+ map.insert(*CLIPBOARD_SVG_FORMAT, ImageFormat::Svg);
+ map
});
-#[derive(Debug, Clone, Copy)]
-enum ClipboardFormatType {
- Text,
- Image,
- Files,
+fn register_clipboard_format(format: PCWSTR) -> u32 {
+ let ret = unsafe { RegisterClipboardFormatW(format) };
+ if ret == 0 {
+ panic!(
+ "Error when registering clipboard format: {}",
+ std::io::Error::last_os_error()
+ );
+ }
+ log::debug!(
+ "Registered clipboard format {} as {}",
+ unsafe { format.display() },
+ ret
+ );
+ ret
+}
+
+fn get_clipboard_data(format: u32) -> Option<LockedGlobal> {
+ let global = HGLOBAL(unsafe { GetClipboardData(format).ok() }?.0);
+ LockedGlobal::lock(global)
}
pub(crate) fn write_to_clipboard(item: ClipboardItem) {
- with_clipboard(|| write_to_clipboard_inner(item));
+ let Some(_clip) = ClipboardGuard::open() else {
+ return;
+ };
+
+ let result: Result<()> = (|| {
+ unsafe { EmptyClipboard()? };
+ for entry in item.entries() {
+ match entry {
+ ClipboardEntry::String(string) => write_string(string)?,
+ ClipboardEntry::Image(image) => write_image(image)?,
+ ClipboardEntry::ExternalPaths(_) => {}
+ }
+ }
+ Ok(())
+ })();
+
+ if let Err(e) = result {
+ log::error!("Failed to write to clipboard: {e}");
+ }
}
pub(crate) fn read_from_clipboard() -> Option<ClipboardItem> {
- with_clipboard(|| {
- with_best_match_format(|item_format| match format_to_type(item_format) {
- ClipboardFormatType::Text => read_string_from_clipboard(),
- ClipboardFormatType::Image => read_image_from_clipboard(item_format),
- ClipboardFormatType::Files => read_files_from_clipboard(),
- })
- })
- .flatten()
+ let _clip = ClipboardGuard::open()?;
+
+ let mut entries = Vec::new();
+ let mut have_text = false;
+ let mut have_image = false;
+ let mut have_files = false;
+
+ let count = unsafe { CountClipboardFormats() };
+ let mut format = 0;
+ for _ in 0..count {
+ format = unsafe { EnumClipboardFormats(format) };
+
+ if !have_text && format == CF_UNICODETEXT.0 as u32 {
+ if let Some(entry) = read_string() {
+ entries.push(entry);
+ have_text = true;
+ }
+ } else if !have_image && is_image_format(format) {
+ if let Some(entry) = read_image(format) {
+ entries.push(entry);
+ have_image = true;
+ }
+ } else if !have_files && format == CF_HDROP.0 as u32 {
+ if let Some(entry) = read_files() {
+ entries.push(entry);
+ have_files = true;
+ }
+ }
+ }
+
+ if entries.is_empty() {
+ log_unsupported_clipboard_formats();
+ return None;
+ }
+ Some(ClipboardItem { entries })
}
pub(crate) fn with_file_names<F>(hdrop: HDROP, mut f: F)
@@ -97,359 +142,247 @@ where
}
match String::from_utf16(&buffer[0..filename_length]) {
Ok(file_name) => f(file_name),
- Err(e) => {
- log::error!("dragged file name is not UTF-16: {}", e)
- }
+ Err(e) => log::error!("dragged file name is not UTF-16: {}", e),
}
}
}
-fn with_clipboard<F, T>(f: F) -> Option<T>
-where
- F: FnOnce() -> T,
-{
- match unsafe { OpenClipboard(None) } {
- Ok(()) => {
- let result = f();
- if let Err(e) = unsafe { CloseClipboard() } {
- log::error!("Failed to close clipboard: {e}",);
- }
- Some(result)
- }
- Err(e) => {
- log::error!("Failed to open clipboard: {e}",);
- None
- }
+fn set_clipboard_bytes<T>(data: &[T], format: u32) -> Result<()> {
+ unsafe {
+ let global = Owned::new(GlobalAlloc(GMEM_MOVEABLE, std::mem::size_of_val(data))?);
+ let ptr = GlobalLock(*global);
+ anyhow::ensure!(!ptr.is_null(), "GlobalLock returned null");
+ std::ptr::copy_nonoverlapping(data.as_ptr(), ptr as _, data.len());
+ GlobalUnlock(*global).ok();
+ SetClipboardData(format, Some(HANDLE(global.0)))?;
+ // SetClipboardData succeeded — the system now owns the memory.
+ std::mem::forget(global);
}
+ Ok(())
}
-fn register_clipboard_format(format: PCWSTR) -> u32 {
- let ret = unsafe { RegisterClipboardFormatW(format) };
- if ret == 0 {
- panic!(
- "Error when registering clipboard format: {}",
- std::io::Error::last_os_error()
- );
+fn get_clipboard_string(format: u32) -> Option<String> {
+ let locked = get_clipboard_data(format)?;
+ let bytes = locked.as_bytes();
+ let words_len = bytes.len() / std::mem::size_of::<u16>();
+ if words_len == 0 {
+ return Some(String::new());
}
- log::debug!(
- "Registered clipboard format {} as {}",
- unsafe { format.display() },
- ret
- );
- ret
+ let slice = unsafe { std::slice::from_raw_parts(bytes.as_ptr() as *const u16, words_len) };
+ let actual_len = slice.iter().position(|&c| c == 0).unwrap_or(words_len);
+ Some(String::from_utf16_lossy(&slice[..actual_len]))
}
-#[inline]
-fn format_to_type(item_format: u32) -> &'static ClipboardFormatType {
- FORMATS_MAP.get(&item_format).unwrap()
-}
-
-// Currently, we only write the first item.
-fn write_to_clipboard_inner(item: ClipboardItem) -> Result<()> {
- unsafe {
- EmptyClipboard()?;
- }
- match item.entries().first() {
- Some(entry) => match entry {
- ClipboardEntry::String(string) => {
- write_string_to_clipboard(string)?;
- }
- ClipboardEntry::Image(image) => {
- write_image_to_clipboard(image)?;
- }
- ClipboardEntry::ExternalPaths(_) => {}
- },
- None => {
- // Writing an empty list of entries just clears the clipboard.
- }
- }
- Ok(())
+fn is_image_format(format: u32) -> bool {
+ IMAGE_FORMATS_MAP.contains_key(&format) || format == CF_DIB.0 as u32
}
-fn write_string_to_clipboard(item: &ClipboardString) -> Result<()> {
- let encode_wide = item.text.encode_utf16().chain(Some(0)).collect_vec();
- set_data_to_clipboard(&encode_wide, CF_UNICODETEXT.0 as u32)?;
+fn write_string(item: &ClipboardString) -> Result<()> {
+ let wide: Vec<u16> = item.text.encode_utf16().chain(Some(0)).collect_vec();
+ set_clipboard_bytes(&wide, CF_UNICODETEXT.0 as u32)?;
if let Some(metadata) = item.metadata.as_ref() {
- let hash_result = {
- let hash = ClipboardString::text_hash(&item.text);
- hash.to_ne_bytes()
- };
- let encode_wide =
- unsafe { std::slice::from_raw_parts(hash_result.as_ptr().cast::<u16>(), 4) };
- set_data_to_clipboard(encode_wide, *CLIPBOARD_HASH_FORMAT)?;
-
- let metadata_wide = metadata.encode_utf16().chain(Some(0)).collect_vec();
- set_data_to_clipboard(&metadata_wide, *CLIPBOARD_METADATA_FORMAT)?;
+ let hash_bytes = ClipboardString::text_hash(&item.text).to_ne_bytes();
+ set_clipboard_bytes(&hash_bytes, *CLIPBOARD_HASH_FORMAT)?;
+
+ let wide: Vec<u16> = metadata.encode_utf16().chain(Some(0)).collect_vec();
+ set_clipboard_bytes(&wide, *CLIPBOARD_METADATA_FORMAT)?;
}
Ok(())
}
-fn set_data_to_clipboard<T>(data: &[T], format: u32) -> Result<()> {
- unsafe {
- let global = GlobalAlloc(GMEM_MOVEABLE, std::mem::size_of_val(data))?;
- let handle = GlobalLock(global);
- std::ptr::copy_nonoverlapping(data.as_ptr(), handle as _, data.len());
- let _ = GlobalUnlock(global);
- SetClipboardData(format, Some(HANDLE(global.0)))?;
+fn write_image(item: &Image) -> Result<()> {
+ let native_format = match item.format {
+ ImageFormat::Svg => Some(*CLIPBOARD_SVG_FORMAT),
+ ImageFormat::Gif => Some(*CLIPBOARD_GIF_FORMAT),
+ ImageFormat::Png => Some(*CLIPBOARD_PNG_FORMAT),
+ ImageFormat::Jpeg => Some(*CLIPBOARD_JPG_FORMAT),
+ _ => None,
+ };
+ if let Some(format) = native_format {
+ set_clipboard_bytes(item.bytes(), format)?;
}
- Ok(())
-}
-// Here writing PNG to the clipboard to better support other apps. For more info, please ref to
-// the PR.
-fn write_image_to_clipboard(item: &Image) -> Result<()> {
- match item.format {
- ImageFormat::Svg => set_data_to_clipboard(item.bytes(), *CLIPBOARD_SVG_FORMAT)?,
- ImageFormat::Gif => {
- set_data_to_clipboard(item.bytes(), *CLIPBOARD_GIF_FORMAT)?;
- let png_bytes = convert_image_to_png_format(item.bytes(), ImageFormat::Gif)?;
- set_data_to_clipboard(&png_bytes, *CLIPBOARD_PNG_FORMAT)?;
- }
- ImageFormat::Png => {
- set_data_to_clipboard(item.bytes(), *CLIPBOARD_PNG_FORMAT)?;
- let png_bytes = convert_image_to_png_format(item.bytes(), ImageFormat::Png)?;
- set_data_to_clipboard(&png_bytes, *CLIPBOARD_PNG_FORMAT)?;
- }
- ImageFormat::Jpeg => {
- set_data_to_clipboard(item.bytes(), *CLIPBOARD_JPG_FORMAT)?;
- let png_bytes = convert_image_to_png_format(item.bytes(), ImageFormat::Jpeg)?;
- set_data_to_clipboard(&png_bytes, *CLIPBOARD_PNG_FORMAT)?;
- }
- other => {
- log::warn!(
- "Clipboard unsupported image format: {:?}, convert to PNG instead.",
- item.format
- );
- let png_bytes = convert_image_to_png_format(item.bytes(), other)?;
- set_data_to_clipboard(&png_bytes, *CLIPBOARD_PNG_FORMAT)?;
+ // Also provide a PNG copy for broad compatibility.
+ // SVG can't be rasterized by the image crate, so skip it.
+ if item.format != ImageFormat::Svg && native_format != Some(*CLIPBOARD_PNG_FORMAT) {
+ if let Some(png_bytes) = convert_to_png(item.bytes(), item.format) {
+ set_clipboard_bytes(&png_bytes, *CLIPBOARD_PNG_FORMAT)?;
}
}
Ok(())
}
-fn convert_image_to_png_format(bytes: &[u8], image_format: ImageFormat) -> Result<Vec<u8>> {
- let image =
- image::load_from_memory_with_format(bytes, gpui_image_format_to_image(image_format))?;
- let mut output_buf = Vec::new();
- image.write_to(
- &mut std::io::Cursor::new(&mut output_buf),
- image::ImageFormat::Png,
- )?;
- Ok(output_buf)
-}
-
-// Here, we enumerate all formats on the clipboard and find the first one that we can process.
-// The reason we don't use `GetPriorityClipboardFormat` is that it sometimes returns the
-// wrong format.
-// For instance, when copying a JPEG image from Microsoft Word, there may be several formats
-// on the clipboard: Jpeg, Png, Svg.
-// If we use `GetPriorityClipboardFormat`, it will return Svg, which is not what we want.
-fn with_best_match_format<F>(f: F) -> Option<ClipboardItem>
-where
- F: Fn(u32) -> Option<ClipboardEntry>,
-{
- let mut text = None;
- let mut image = None;
- let mut files = None;
- let count = unsafe { CountClipboardFormats() };
- let mut clipboard_format = 0;
- for _ in 0..count {
- clipboard_format = unsafe { EnumClipboardFormats(clipboard_format) };
- let Some(item_format) = FORMATS_MAP.get(&clipboard_format) else {
- continue;
- };
- let bucket = match item_format {
- ClipboardFormatType::Text if text.is_none() => &mut text,
- ClipboardFormatType::Image if image.is_none() => &mut image,
- ClipboardFormatType::Files if files.is_none() => &mut files,
- _ => continue,
- };
- if let Some(entry) = f(clipboard_format) {
- *bucket = Some(entry);
- }
- }
-
- if let Some(entry) = [image, files, text].into_iter().flatten().next() {
- return Some(ClipboardItem {
- entries: vec![entry],
- });
- }
-
- // log the formats that we don't support yet.
- {
- clipboard_format = 0;
- for _ in 0..count {
- clipboard_format = unsafe { EnumClipboardFormats(clipboard_format) };
- let mut buffer = [0u16; 64];
- unsafe { GetClipboardFormatNameW(clipboard_format, &mut buffer) };
- let format_name = String::from_utf16_lossy(&buffer);
- log::warn!(
- "Try to paste with unsupported clipboard format: {}, {}.",
- clipboard_format,
- format_name
- );
- }
- }
- None
+fn convert_to_png(bytes: &[u8], format: ImageFormat) -> Option<Vec<u8>> {
+ let img_format = gpui_to_image_format(format)?;
+ let image = image::load_from_memory_with_format(bytes, img_format)
+ .map_err(|e| log::warn!("Failed to decode image for PNG conversion: {e}"))
+ .ok()?;
+ let mut buf = Vec::new();
+ image
+ .write_to(&mut std::io::Cursor::new(&mut buf), image::ImageFormat::Png)
+ .map_err(|e| log::warn!("Failed to encode PNG: {e}"))
+ .ok()?;
+ Some(buf)
}
-fn read_string_from_clipboard() -> Option<ClipboardEntry> {
- let text = with_clipboard_data(CF_UNICODETEXT.0 as u32, |data_ptr, _| {
- let pcwstr = PCWSTR(data_ptr as *const u16);
- String::from_utf16_lossy(unsafe { pcwstr.as_wide() })
- })?;
- let Some(hash) = read_hash_from_clipboard() else {
- return Some(ClipboardEntry::String(ClipboardString::new(text)));
- };
- let Some(metadata) = read_metadata_from_clipboard() else {
- return Some(ClipboardEntry::String(ClipboardString::new(text)));
- };
- if hash == ClipboardString::text_hash(&text) {
- Some(ClipboardEntry::String(ClipboardString {
- text,
- metadata: Some(metadata),
- }))
- } else {
- Some(ClipboardEntry::String(ClipboardString::new(text)))
- }
+fn read_string() -> Option<ClipboardEntry> {
+ let text = get_clipboard_string(CF_UNICODETEXT.0 as u32)?;
+ let metadata = read_clipboard_metadata(&text);
+ Some(ClipboardEntry::String(ClipboardString { text, metadata }))
}
-fn read_hash_from_clipboard() -> Option<u64> {
- if unsafe { IsClipboardFormatAvailable(*CLIPBOARD_HASH_FORMAT).is_err() } {
+fn read_clipboard_metadata(text: &str) -> Option<String> {
+ let locked = get_clipboard_data(*CLIPBOARD_HASH_FORMAT)?;
+ let hash_bytes: [u8; 8] = locked.as_bytes().get(..8)?.try_into().ok()?;
+ let hash = u64::from_ne_bytes(hash_bytes);
+ if hash != ClipboardString::text_hash(text) {
return None;
}
- with_clipboard_data(*CLIPBOARD_HASH_FORMAT, |data_ptr, size| {
- if size < 8 {
- return None;
- }
- let hash_bytes: [u8; 8] = unsafe {
- std::slice::from_raw_parts(data_ptr.cast::<u8>(), 8)
- .try_into()
- .ok()
- }?;
- Some(u64::from_ne_bytes(hash_bytes))
- })?
+ get_clipboard_string(*CLIPBOARD_METADATA_FORMAT)
}
-fn read_metadata_from_clipboard() -> Option<String> {
- unsafe { IsClipboardFormatAvailable(*CLIPBOARD_METADATA_FORMAT).ok()? };
- with_clipboard_data(*CLIPBOARD_METADATA_FORMAT, |data_ptr, _size| {
- let pcwstr = PCWSTR(data_ptr as *const u16);
- String::from_utf16_lossy(unsafe { pcwstr.as_wide() })
- })
+fn read_image(format: u32) -> Option<ClipboardEntry> {
+ let locked = get_clipboard_data(format)?;
+ let (bytes, image_format) = if format == CF_DIB.0 as u32 {
+ (convert_dib_to_bmp(locked.as_bytes())?, ImageFormat::Bmp)
+ } else {
+ let image_format = *IMAGE_FORMATS_MAP.get(&format)?;
+ (locked.as_bytes().to_vec(), image_format)
+ };
+ let id = hash(&bytes);
+ Some(ClipboardEntry::Image(Image {
+ format: image_format,
+ bytes,
+ id,
+ }))
}
-fn read_image_from_clipboard(format: u32) -> Option<ClipboardEntry> {
- // Handle CF_DIB format specially - it's raw bitmap data that needs conversion
- if format == CF_DIB.0 as u32 {
- return read_image_for_type(format, ImageFormat::Bmp, Some(convert_dib_to_bmp));
- }
- let image_format = format_number_to_image_format(format)?;
- read_image_for_type::<fn(&[u8]) -> Option<Vec<u8>>>(format, *image_format, None)
+fn read_files() -> Option<ClipboardEntry> {
+ let locked = get_clipboard_data(CF_HDROP.0 as u32)?;
+ let hdrop = HDROP(locked.ptr as *mut _);
+ let mut filenames = Vec::new();
+ with_file_names(hdrop, |name| filenames.push(std::path::PathBuf::from(name)));
+ Some(ClipboardEntry::ExternalPaths(ExternalPaths(
+ filenames.into(),
+ )))
}
-/// Convert DIB data to BMP file format.
-/// DIB is essentially BMP without a file header, so we just need to add the 14-byte BITMAPFILEHEADER.
-fn convert_dib_to_bmp(dib_data: &[u8]) -> Option<Vec<u8>> {
- if dib_data.len() < 40 {
+/// DIB is BMP without the 14-byte BITMAPFILEHEADER. Prepend one.
+fn convert_dib_to_bmp(dib: &[u8]) -> Option<Vec<u8>> {
+ if dib.len() < 40 {
return None;
}
- let file_size = 14 + dib_data.len() as u32;
- // Calculate pixel data offset
- let header_size = u32::from_le_bytes(dib_data[0..4].try_into().ok()?);
- let bit_count = u16::from_le_bytes(dib_data[14..16].try_into().ok()?);
- let compression = u32::from_le_bytes(dib_data[16..20].try_into().ok()?);
+ let header_size = u32::from_le_bytes(dib[0..4].try_into().ok()?);
+ let bit_count = u16::from_le_bytes(dib[14..16].try_into().ok()?);
+ let compression = u32::from_le_bytes(dib[16..20].try_into().ok()?);
- // Calculate color table size
let color_table_size = if bit_count <= 8 {
- let colors_used = u32::from_le_bytes(dib_data[32..36].try_into().ok()?);
- let num_colors = if colors_used == 0 {
+ let colors_used = u32::from_le_bytes(dib[32..36].try_into().ok()?);
+ (if colors_used == 0 {
1u32 << bit_count
} else {
colors_used
- };
- num_colors * 4
+ }) * 4
} else if compression == 3 {
12 // BI_BITFIELDS
} else {
0
};
- let pixel_data_offset = 14 + header_size + color_table_size;
+ let pixel_offset = 14 + header_size + color_table_size;
+ let file_size = 14 + dib.len() as u32;
- // Build BITMAPFILEHEADER (14 bytes)
- let mut bmp_data = Vec::with_capacity(file_size as usize);
- bmp_data.extend_from_slice(b"BM"); // Signature
- bmp_data.extend_from_slice(&file_size.to_le_bytes()); // File size
- bmp_data.extend_from_slice(&[0u8; 4]); // Reserved
- bmp_data.extend_from_slice(&pixel_data_offset.to_le_bytes()); // Pixel data offset
- bmp_data.extend_from_slice(dib_data); // DIB data
+ let mut bmp = Vec::with_capacity(file_size as usize);
+ bmp.extend_from_slice(b"BM");
+ bmp.extend_from_slice(&file_size.to_le_bytes());
+ bmp.extend_from_slice(&[0u8; 4]); // reserved
+ bmp.extend_from_slice(&pixel_offset.to_le_bytes());
+ bmp.extend_from_slice(dib);
+ Some(bmp)
+}
- Some(bmp_data)
+fn log_unsupported_clipboard_formats() {
+ let count = unsafe { CountClipboardFormats() };
+ let mut format = 0;
+ for _ in 0..count {
+ format = unsafe { EnumClipboardFormats(format) };
+ let mut buffer = [0u16; 64];
+ unsafe { GetClipboardFormatNameW(format, &mut buffer) };
+ let format_name = String::from_utf16_lossy(&buffer);
+ log::warn!(
+            "Tried to paste with unsupported clipboard format: {}, {}.",
+ format,
+ format_name
+ );
+ }
}
-#[inline]
-fn format_number_to_image_format(format_number: u32) -> Option<&'static ImageFormat> {
- IMAGE_FORMATS_MAP.get(&format_number)
+fn gpui_to_image_format(value: ImageFormat) -> Option<image::ImageFormat> {
+ match value {
+ ImageFormat::Png => Some(image::ImageFormat::Png),
+ ImageFormat::Jpeg => Some(image::ImageFormat::Jpeg),
+ ImageFormat::Webp => Some(image::ImageFormat::WebP),
+ ImageFormat::Gif => Some(image::ImageFormat::Gif),
+ ImageFormat::Bmp => Some(image::ImageFormat::Bmp),
+ ImageFormat::Tiff => Some(image::ImageFormat::Tiff),
+ other => {
+ log::warn!("No image crate equivalent for format: {other:?}");
+ None
+ }
+ }
}
-fn read_image_for_type<F>(
- format_number: u32,
- format: ImageFormat,
- convert: Option<F>,
-) -> Option<ClipboardEntry>
-where
- F: FnOnce(&[u8]) -> Option<Vec<u8>>,
-{
- let (bytes, id) = with_clipboard_data(format_number, |data_ptr, size| {
- let raw_bytes = unsafe { std::slice::from_raw_parts(data_ptr as *const u8, size) };
- let bytes = match convert {
- Some(converter) => converter(raw_bytes)?,
- None => raw_bytes.to_vec(),
- };
- let id = hash(&bytes);
- Some((bytes, id))
- })??;
- Some(ClipboardEntry::Image(Image { format, bytes, id }))
+struct ClipboardGuard;
+
+impl ClipboardGuard {
+ fn open() -> Option<Self> {
+ match unsafe { OpenClipboard(None) } {
+ Ok(()) => Some(Self),
+ Err(e) => {
+ log::error!("Failed to open clipboard: {e}");
+ None
+ }
+ }
+ }
}
-fn read_files_from_clipboard() -> Option<ClipboardEntry> {
- let filenames = with_clipboard_data(CF_HDROP.0 as u32, |data_ptr, _size| {
- let hdrop = HDROP(data_ptr);
- let mut filenames = Vec::new();
- with_file_names(hdrop, |file_name| {
- filenames.push(std::path::PathBuf::from(file_name));
- });
- filenames
- })?;
- Some(ClipboardEntry::ExternalPaths(ExternalPaths(
- filenames.into(),
- )))
+impl Drop for ClipboardGuard {
+ fn drop(&mut self) {
+ if let Err(e) = unsafe { CloseClipboard() } {
+ log::error!("Failed to close clipboard: {e}");
+ }
+ }
}
-fn with_clipboard_data<F, R>(format: u32, f: F) -> Option<R>
-where
- F: FnOnce(*mut std::ffi::c_void, usize) -> R,
-{
- let global = HGLOBAL(unsafe { GetClipboardData(format).ok() }?.0);
- let size = unsafe { GlobalSize(global) };
- let data_ptr = unsafe { GlobalLock(global) };
- let result = f(data_ptr, size);
- unsafe { GlobalUnlock(global).ok() };
- Some(result)
+struct LockedGlobal {
+ global: HGLOBAL,
+ ptr: *const u8,
+ size: usize,
}
-fn gpui_image_format_to_image(value: ImageFormat) -> image::ImageFormat {
- match value {
- ImageFormat::Png => image::ImageFormat::Png,
- ImageFormat::Jpeg => image::ImageFormat::Jpeg,
- ImageFormat::Webp => image::ImageFormat::WebP,
- ImageFormat::Gif => image::ImageFormat::Gif,
- // TODO: ImageFormat::Svg
- ImageFormat::Bmp => image::ImageFormat::Bmp,
- ImageFormat::Tiff => image::ImageFormat::Tiff,
- _ => unreachable!(),
+impl LockedGlobal {
+ fn lock(global: HGLOBAL) -> Option<Self> {
+ let size = unsafe { GlobalSize(global) };
+ let ptr = unsafe { GlobalLock(global) };
+ if ptr.is_null() {
+ return None;
+ }
+ Some(Self {
+ global,
+ ptr: ptr as *const u8,
+ size,
+ })
+ }
+
+ fn as_bytes(&self) -> &[u8] {
+ unsafe { std::slice::from_raw_parts(self.ptr, self.size) }
+ }
+}
+
+impl Drop for LockedGlobal {
+ fn drop(&mut self) {
+ unsafe { GlobalUnlock(self.global).ok() };
}
}
@@ -134,6 +134,7 @@ pub enum IconName {
Flame,
Folder,
FolderOpen,
+ FolderPlus,
FolderSearch,
Font,
FontSize,
@@ -150,6 +151,7 @@ pub enum IconName {
GitCommit,
GitGraph,
GitMergeConflict,
+ GitWorktree,
Github,
Hash,
HistoryRerun,
@@ -218,6 +220,9 @@ pub enum IconName {
Settings,
ShieldCheck,
Shift,
+ SignalHigh,
+ SignalLow,
+ SignalMedium,
Slash,
Sliders,
Space,
@@ -16,7 +16,7 @@ use gpui::{
WeakEntity, Window, actions, checkerboard, div, img, point, px, size,
};
use language::File as _;
-use persistence::IMAGE_VIEWER;
+use persistence::ImageViewerDb;
use project::{ImageItem, Project, ProjectPath, image_store::ImageItemEvent};
use settings::Settings;
use theme::ThemeSettings;
@@ -600,8 +600,9 @@ impl SerializableItem for ImageView {
window: &mut Window,
cx: &mut App,
) -> Task<anyhow::Result<Entity<Self>>> {
+ let db = ImageViewerDb::global(cx);
window.spawn(cx, async move |cx| {
- let image_path = IMAGE_VIEWER
+ let image_path = db
.get_image_path(item_id, workspace_id)?
.context("No image path found")?;
@@ -634,13 +635,8 @@ impl SerializableItem for ImageView {
_window: &mut Window,
cx: &mut App,
) -> Task<anyhow::Result<()>> {
- delete_unloaded_items(
- alive_items,
- workspace_id,
- "image_viewers",
- &IMAGE_VIEWER,
- cx,
- )
+ let db = ImageViewerDb::global(cx);
+ delete_unloaded_items(alive_items, workspace_id, "image_viewers", &db, cx)
}
fn serialize(
@@ -654,12 +650,11 @@ impl SerializableItem for ImageView {
let workspace_id = workspace.database_id()?;
let image_path = self.image_item.read(cx).abs_path(cx)?;
+ let db = ImageViewerDb::global(cx);
Some(cx.background_spawn({
async move {
log::debug!("Saving image at path {image_path:?}");
- IMAGE_VIEWER
- .save_image_path(item_id, workspace_id, image_path)
- .await
+ db.save_image_path(item_id, workspace_id, image_path).await
}
}))
}
@@ -910,7 +905,7 @@ mod persistence {
)];
}
- db::static_connection!(IMAGE_VIEWER, ImageViewerDb, [WorkspaceDb]);
+ db::static_connection!(ImageViewerDb, [WorkspaceDb]);
impl ImageViewerDb {
query! {
@@ -39,7 +39,7 @@ use ui_input::InputField;
use util::ResultExt;
use workspace::{
Item, ModalView, SerializableItem, Workspace, notifications::NotifyTaskExt as _,
- register_serializable_item,
+ register_serializable_item, with_active_or_new_workspace,
};
pub use ui_components::*;
@@ -47,7 +47,7 @@ use zed_actions::{ChangeKeybinding, OpenKeymap};
use crate::{
action_completion_provider::ActionCompletionProvider,
- persistence::KEYBINDING_EDITORS,
+ persistence::KeybindingEditorDb,
ui_components::keystroke_input::{
ClearKeystrokes, KeystrokeInput, StartRecording, StopRecording,
},
@@ -128,14 +128,16 @@ pub fn init(cx: &mut App) {
}
}
+ cx.on_action(|_: &OpenKeymap, cx| {
+ with_active_or_new_workspace(cx, |workspace, window, cx| {
+ open_keymap_editor(None, workspace, window, cx);
+ });
+ });
+
cx.observe_new(|workspace: &mut Workspace, _window, _cx| {
- workspace
- .register_action(|workspace, _: &OpenKeymap, window, cx| {
- open_keymap_editor(None, workspace, window, cx);
- })
- .register_action(|workspace, action: &ChangeKeybinding, window, cx| {
- open_keymap_editor(Some(action.action.clone()), workspace, window, cx);
- });
+ workspace.register_action(|workspace, action: &ChangeKeybinding, window, cx| {
+ open_keymap_editor(Some(action.action.clone()), workspace, window, cx);
+ });
})
.detach();
@@ -3818,13 +3820,8 @@ impl SerializableItem for KeymapEditor {
_window: &mut Window,
cx: &mut App,
) -> gpui::Task<gpui::Result<()>> {
- workspace::delete_unloaded_items(
- alive_items,
- workspace_id,
- "keybinding_editors",
- &KEYBINDING_EDITORS,
- cx,
- )
+ let db = KeybindingEditorDb::global(cx);
+ workspace::delete_unloaded_items(alive_items, workspace_id, "keybinding_editors", &db, cx)
}
fn deserialize(
@@ -3835,11 +3832,9 @@ impl SerializableItem for KeymapEditor {
window: &mut Window,
cx: &mut App,
) -> gpui::Task<gpui::Result<Entity<Self>>> {
+ let db = KeybindingEditorDb::global(cx);
window.spawn(cx, async move |cx| {
- if KEYBINDING_EDITORS
- .get_keybinding_editor(item_id, workspace_id)?
- .is_some()
- {
+ if db.get_keybinding_editor(item_id, workspace_id)?.is_some() {
cx.update(|window, cx| cx.new(|cx| KeymapEditor::new(workspace, window, cx)))
} else {
Err(anyhow!("No keybinding editor to deserialize"))
@@ -3856,11 +3851,10 @@ impl SerializableItem for KeymapEditor {
cx: &mut ui::Context<Self>,
) -> Option<gpui::Task<gpui::Result<()>>> {
let workspace_id = workspace.database_id()?;
- Some(cx.background_spawn(async move {
- KEYBINDING_EDITORS
- .save_keybinding_editor(item_id, workspace_id)
- .await
- }))
+ let db = KeybindingEditorDb::global(cx);
+ Some(cx.background_spawn(
+ async move { db.save_keybinding_editor(item_id, workspace_id).await },
+ ))
}
fn should_serialize(&self, _event: &Self::Event) -> bool {
@@ -3889,7 +3883,7 @@ mod persistence {
)];
}
- db::static_connection!(KEYBINDING_EDITORS, KeybindingEditorDb, [WorkspaceDb]);
+ db::static_connection!(KeybindingEditorDb, [WorkspaceDb]);
impl KeybindingEditorDb {
query! {
@@ -4610,7 +4610,7 @@ impl BufferSnapshot {
continue;
}
- let mut all_brackets: Vec<(BracketMatch<usize>, bool)> = Vec::new();
+ let mut all_brackets: Vec<(BracketMatch<usize>, usize, bool)> = Vec::new();
let mut opens = Vec::new();
let mut color_pairs = Vec::new();
@@ -4636,8 +4636,9 @@ impl BufferSnapshot {
let mut open = None;
let mut close = None;
let syntax_layer_depth = mat.depth;
+ let pattern_index = mat.pattern_index;
let config = configs[mat.grammar_index];
- let pattern = &config.patterns[mat.pattern_index];
+ let pattern = &config.patterns[pattern_index];
for capture in mat.captures {
if capture.index == config.open_capture_ix {
open = Some(capture.node.byte_range());
@@ -4658,7 +4659,7 @@ impl BufferSnapshot {
}
open_to_close_ranges
- .entry((open_range.start, open_range.end))
+ .entry((open_range.start, open_range.end, pattern_index))
.or_insert_with(BTreeMap::new)
.insert(
(close_range.start, close_range.end),
@@ -4679,6 +4680,7 @@ impl BufferSnapshot {
newline_only: pattern.newline_only,
color_index: None,
},
+ pattern_index,
pattern.rainbow_exclude,
));
}
@@ -4692,22 +4694,43 @@ impl BufferSnapshot {
// For each close, we know the expected open_len from tree-sitter matches.
// Map each close to its expected open length (for inferring opens)
- let close_to_open_len: HashMap<(usize, usize), usize> = all_brackets
+ let close_to_open_len: HashMap<(usize, usize, usize), usize> = all_brackets
.iter()
- .map(|(m, _)| ((m.close_range.start, m.close_range.end), m.open_range.len()))
+ .map(|(bracket_match, pattern_index, _)| {
+ (
+ (
+ bracket_match.close_range.start,
+ bracket_match.close_range.end,
+ *pattern_index,
+ ),
+ bracket_match.open_range.len(),
+ )
+ })
.collect();
// Collect unique opens and closes within this chunk
- let mut unique_opens: HashSet<(usize, usize)> = all_brackets
+ let mut unique_opens: HashSet<(usize, usize, usize)> = all_brackets
.iter()
- .map(|(m, _)| (m.open_range.start, m.open_range.end))
- .filter(|(start, _)| chunk_range.contains(start))
+ .map(|(bracket_match, pattern_index, _)| {
+ (
+ bracket_match.open_range.start,
+ bracket_match.open_range.end,
+ *pattern_index,
+ )
+ })
+ .filter(|(start, _, _)| chunk_range.contains(start))
.collect();
- let mut unique_closes: Vec<(usize, usize)> = all_brackets
+ let mut unique_closes: Vec<(usize, usize, usize)> = all_brackets
.iter()
- .map(|(m, _)| (m.close_range.start, m.close_range.end))
- .filter(|(start, _)| chunk_range.contains(start))
+ .map(|(bracket_match, pattern_index, _)| {
+ (
+ bracket_match.close_range.start,
+ bracket_match.close_range.end,
+ *pattern_index,
+ )
+ })
+ .filter(|(start, _, _)| chunk_range.contains(start))
.collect();
unique_closes.sort();
unique_closes.dedup();
@@ -4716,8 +4739,9 @@ impl BufferSnapshot {
let mut unique_opens_vec: Vec<_> = unique_opens.iter().copied().collect();
unique_opens_vec.sort();
- let mut valid_pairs: HashSet<((usize, usize), (usize, usize))> = HashSet::default();
- let mut open_stack: Vec<(usize, usize)> = Vec::new();
+ let mut valid_pairs: HashSet<((usize, usize, usize), (usize, usize, usize))> =
+ HashSet::default();
+ let mut open_stacks: HashMap<usize, Vec<(usize, usize)>> = HashMap::default();
let mut open_idx = 0;
for close in &unique_closes {
@@ -4725,36 +4749,53 @@ impl BufferSnapshot {
while open_idx < unique_opens_vec.len()
&& unique_opens_vec[open_idx].0 < close.0
{
- open_stack.push(unique_opens_vec[open_idx]);
+ let (start, end, pattern_index) = unique_opens_vec[open_idx];
+ open_stacks
+ .entry(pattern_index)
+ .or_default()
+ .push((start, end));
open_idx += 1;
}
// Try to match with most recent open
- if let Some(open) = open_stack.pop() {
- valid_pairs.insert((open, *close));
+ let (close_start, close_end, pattern_index) = *close;
+ if let Some(open) = open_stacks
+ .get_mut(&pattern_index)
+ .and_then(|open_stack| open_stack.pop())
+ {
+ valid_pairs.insert(((open.0, open.1, pattern_index), *close));
} else if let Some(&open_len) = close_to_open_len.get(close) {
// No open on stack - infer one based on expected open_len
- if close.0 >= open_len {
- let inferred = (close.0 - open_len, close.0);
+ if close_start >= open_len {
+ let inferred = (close_start - open_len, close_start, pattern_index);
unique_opens.insert(inferred);
valid_pairs.insert((inferred, *close));
all_brackets.push((
BracketMatch {
open_range: inferred.0..inferred.1,
- close_range: close.0..close.1,
+ close_range: close_start..close_end,
newline_only: false,
syntax_layer_depth: 0,
color_index: None,
},
+ pattern_index,
false,
));
}
}
}
- all_brackets.retain(|(m, _)| {
- let open = (m.open_range.start, m.open_range.end);
- let close = (m.close_range.start, m.close_range.end);
+ all_brackets.retain(|(bracket_match, pattern_index, _)| {
+ let open = (
+ bracket_match.open_range.start,
+ bracket_match.open_range.end,
+ *pattern_index,
+ );
+ let close = (
+ bracket_match.close_range.start,
+ bracket_match.close_range.end,
+ *pattern_index,
+ );
valid_pairs.contains(&(open, close))
});
}
@@ -4762,7 +4803,7 @@ impl BufferSnapshot {
let mut all_brackets = all_brackets
.into_iter()
.enumerate()
- .map(|(index, (bracket_match, rainbow_exclude))| {
+ .map(|(index, (bracket_match, _, rainbow_exclude))| {
            // Certain languages have "brackets" that are not brackets, e.g. tags, and such a
            // bracket will match the entire tag with all text inside.
// For now, avoid highlighting any pair that has more than single char in each bracket.
@@ -63,6 +63,20 @@ impl LlmApiToken {
Self::fetch(self.0.write().await, client, organization_id).await
}
+ /// Clears the existing token before attempting to fetch a new one.
+ ///
+ /// Used when switching organizations so that a failed refresh doesn't
+ /// leave a token for the wrong organization.
+ pub async fn clear_and_refresh(
+ &self,
+ client: &Arc<Client>,
+ organization_id: Option<OrganizationId>,
+ ) -> Result<String> {
+ let mut lock = self.0.write().await;
+ *lock = None;
+ Self::fetch(lock, client, organization_id).await
+ }
+
async fn fetch(
mut lock: RwLockWriteGuard<'_, Option<String>>,
client: &Arc<Client>,
@@ -82,13 +96,16 @@ impl LlmApiToken {
*lock = Some(response.token.0.clone());
Ok(response.token.0)
}
- Err(err) => match err {
- ClientApiError::Unauthorized => {
- client.request_sign_out();
- Err(err).context("Failed to create LLM token")
+ Err(err) => {
+ *lock = None;
+ match err {
+ ClientApiError::Unauthorized => {
+ client.request_sign_out();
+ Err(err).context("Failed to create LLM token")
+ }
+ ClientApiError::Other(err) => Err(err),
}
- ClientApiError::Other(err) => Err(err),
- },
+ }
}
}
}
@@ -105,6 +122,11 @@ impl NeedsLlmTokenRefresh for http_client::Response<http_client::AsyncBody> {
}
}
+enum TokenRefreshMode {
+ Refresh,
+ ClearAndRefresh,
+}
+
struct GlobalRefreshLlmTokenListener(Entity<RefreshLlmTokenListener>);
impl Global for GlobalRefreshLlmTokenListener {}
@@ -140,7 +162,7 @@ impl RefreshLlmTokenListener {
let subscription = cx.subscribe(&user_store, |this, _user_store, event, cx| {
if matches!(event, client::user::Event::OrganizationChanged) {
- this.refresh(cx);
+ this.refresh(TokenRefreshMode::ClearAndRefresh, cx);
}
});
@@ -152,7 +174,7 @@ impl RefreshLlmTokenListener {
}
}
- fn refresh(&self, cx: &mut Context<Self>) {
+ fn refresh(&self, mode: TokenRefreshMode, cx: &mut Context<Self>) {
let client = self.client.clone();
let llm_api_token = self.llm_api_token.clone();
let organization_id = self
@@ -161,7 +183,16 @@ impl RefreshLlmTokenListener {
.current_organization()
.map(|organization| organization.id.clone());
cx.spawn(async move |this, cx| {
- llm_api_token.refresh(&client, organization_id).await?;
+ match mode {
+ TokenRefreshMode::Refresh => {
+ llm_api_token.refresh(&client, organization_id).await?;
+ }
+ TokenRefreshMode::ClearAndRefresh => {
+ llm_api_token
+ .clear_and_refresh(&client, organization_id)
+ .await?;
+ }
+ }
this.update(cx, |_this, cx| cx.emit(LlmTokenRefreshedEvent))
})
.detach_and_log_err(cx);
@@ -170,7 +201,7 @@ impl RefreshLlmTokenListener {
fn handle_refresh_llm_token(this: Entity<Self>, message: &MessageToClient, cx: &mut App) {
match message {
MessageToClient::UserUpdated => {
- this.update(cx, |this, cx| this.refresh(cx));
+ this.update(cx, |this, cx| this.refresh(TokenRefreshMode::Refresh, cx));
}
}
}
@@ -67,7 +67,6 @@ vercel = { workspace = true, features = ["schemars"] }
x_ai = { workspace = true, features = ["schemars"] }
[dev-dependencies]
-
language_model = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true
@@ -24,7 +24,7 @@ use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*};
use ui_input::InputField;
use util::ResultExt;
-use crate::provider::util::parse_tool_arguments;
+use crate::provider::util::{fix_streamed_json, parse_tool_arguments};
pub use settings::AnthropicAvailableModel as AvailableModel;
@@ -140,13 +140,10 @@ impl LanguageModelProvider for AnthropicLanguageModelProvider {
}
fn recommended_models(&self, _cx: &App) -> Vec<Arc<dyn LanguageModel>> {
- [
- anthropic::Model::ClaudeSonnet4_6,
- anthropic::Model::ClaudeSonnet4_6Thinking,
- ]
- .into_iter()
- .map(|model| self.create_language_model(model))
- .collect()
+ [anthropic::Model::ClaudeSonnet4_6]
+ .into_iter()
+ .map(|model| self.create_language_model(model))
+ .collect()
}
fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
@@ -178,7 +175,12 @@ impl LanguageModelProvider for AnthropicLanguageModelProvider {
max_output_tokens: model.max_output_tokens,
default_temperature: model.default_temperature,
extra_beta_headers: model.extra_beta_headers.clone(),
- mode: model.mode.unwrap_or_default().into(),
+ mode: match model.mode.unwrap_or_default() {
+ settings::ModelMode::Default => AnthropicModelMode::Default,
+ settings::ModelMode::Thinking { budget_tokens } => {
+ AnthropicModelMode::Thinking { budget_tokens }
+ }
+ },
},
);
}
@@ -356,10 +358,14 @@ pub fn into_anthropic_count_tokens_request(
} else {
Some(anthropic::StringOrContents::String(system_message))
},
- thinking: if request.thinking_allowed
- && let AnthropicModelMode::Thinking { budget_tokens } = mode
- {
- Some(anthropic::Thinking::Enabled { budget_tokens })
+ thinking: if request.thinking_allowed {
+ match mode {
+ AnthropicModelMode::Thinking { budget_tokens } => {
+ Some(anthropic::Thinking::Enabled { budget_tokens })
+ }
+ AnthropicModelMode::AdaptiveThinking => Some(anthropic::Thinking::Adaptive),
+ AnthropicModelMode::Default => None,
+ }
} else {
None
},
@@ -517,7 +523,36 @@ impl LanguageModel for AnthropicModel {
}
fn supports_thinking(&self) -> bool {
- matches!(self.model.mode(), AnthropicModelMode::Thinking { .. })
+ self.model.supports_thinking()
+ }
+
+ fn supported_effort_levels(&self) -> Vec<language_model::LanguageModelEffortLevel> {
+ if self.model.supports_adaptive_thinking() {
+ vec![
+ language_model::LanguageModelEffortLevel {
+ name: "Low".into(),
+ value: "low".into(),
+ is_default: false,
+ },
+ language_model::LanguageModelEffortLevel {
+ name: "Medium".into(),
+ value: "medium".into(),
+ is_default: false,
+ },
+ language_model::LanguageModelEffortLevel {
+ name: "High".into(),
+ value: "high".into(),
+ is_default: true,
+ },
+ language_model::LanguageModelEffortLevel {
+ name: "Max".into(),
+ value: "max".into(),
+ is_default: false,
+ },
+ ]
+ } else {
+ Vec::new()
+ }
}
fn telemetry_id(&self) -> String {
@@ -700,10 +735,14 @@ pub fn into_anthropic(
} else {
Some(anthropic::StringOrContents::String(system_message))
},
- thinking: if request.thinking_allowed
- && let AnthropicModelMode::Thinking { budget_tokens } = mode
- {
- Some(anthropic::Thinking::Enabled { budget_tokens })
+ thinking: if request.thinking_allowed {
+ match mode {
+ AnthropicModelMode::Thinking { budget_tokens } => {
+ Some(anthropic::Thinking::Enabled { budget_tokens })
+ }
+ AnthropicModelMode::AdaptiveThinking => Some(anthropic::Thinking::Adaptive),
+ AnthropicModelMode::Default => None,
+ }
} else {
None
},
@@ -723,7 +762,24 @@ pub fn into_anthropic(
LanguageModelToolChoice::None => anthropic::ToolChoice::None,
}),
metadata: None,
- output_config: None,
+ output_config: if request.thinking_allowed
+ && matches!(mode, AnthropicModelMode::AdaptiveThinking)
+ {
+ request.thinking_effort.as_deref().and_then(|effort| {
+ let effort = match effort {
+ "low" => Some(anthropic::Effort::Low),
+ "medium" => Some(anthropic::Effort::Medium),
+ "high" => Some(anthropic::Effort::High),
+ "max" => Some(anthropic::Effort::Max),
+ _ => None,
+ };
+ effort.map(|effort| anthropic::OutputConfig {
+ effort: Some(effort),
+ })
+ })
+ } else {
+ None
+ },
stop_sequences: Vec::new(),
speed: request.speed.map(From::from),
temperature: request.temperature.or(Some(default_temperature)),
@@ -817,9 +873,9 @@ impl AnthropicEventMapper {
// valid JSON that serde can accept, e.g. by closing
// unclosed delimiters. This way, we can update the
// UI with whatever has been streamed back so far.
- if let Ok(input) = serde_json::Value::from_str(
- &partial_json_fixer::fix_json(&tool_use.input_json),
- ) {
+ if let Ok(input) =
+ serde_json::Value::from_str(&fix_streamed_json(&tool_use.input_json))
+ {
return vec![Ok(LanguageModelCompletionEvent::ToolUse(
LanguageModelToolUse {
id: tool_use.id.clone().into(),
@@ -48,7 +48,7 @@ use ui_input::InputField;
use util::ResultExt;
use crate::AllLanguageModelSettings;
-use crate::provider::util::parse_tool_arguments;
+use crate::provider::util::{fix_streamed_json, parse_tool_arguments};
actions!(bedrock, [Tab, TabPrev]);
@@ -642,10 +642,36 @@ impl LanguageModel for BedrockModel {
}
fn supports_thinking(&self) -> bool {
- matches!(
- self.model.mode(),
- BedrockModelMode::Thinking { .. } | BedrockModelMode::AdaptiveThinking { .. }
- )
+ self.model.supports_thinking()
+ }
+
+ fn supported_effort_levels(&self) -> Vec<language_model::LanguageModelEffortLevel> {
+ if self.model.supports_adaptive_thinking() {
+ vec![
+ language_model::LanguageModelEffortLevel {
+ name: "Low".into(),
+ value: "low".into(),
+ is_default: false,
+ },
+ language_model::LanguageModelEffortLevel {
+ name: "Medium".into(),
+ value: "medium".into(),
+ is_default: false,
+ },
+ language_model::LanguageModelEffortLevel {
+ name: "High".into(),
+ value: "high".into(),
+ is_default: true,
+ },
+ language_model::LanguageModelEffortLevel {
+ name: "Max".into(),
+ value: "max".into(),
+ is_default: false,
+ },
+ ]
+ } else {
+ Vec::new()
+ }
}
fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
@@ -718,7 +744,7 @@ impl LanguageModel for BedrockModel {
model_id,
self.model.default_temperature(),
self.model.max_output_tokens(),
- self.model.mode(),
+ self.model.thinking_mode(),
self.model.supports_caching(),
self.model.supports_tool_use(),
use_extended_context,
@@ -811,7 +837,7 @@ pub fn into_bedrock(
model: String,
default_temperature: f32,
max_output_tokens: u64,
- mode: BedrockModelMode,
+ thinking_mode: BedrockModelMode,
supports_caching: bool,
supports_tool_use: bool,
allow_extended_context: bool,
@@ -1085,11 +1111,24 @@ pub fn into_bedrock(
system: Some(system_message),
tools: tool_config,
thinking: if request.thinking_allowed {
- match mode {
+ match thinking_mode {
BedrockModelMode::Thinking { budget_tokens } => {
Some(bedrock::Thinking::Enabled { budget_tokens })
}
- BedrockModelMode::AdaptiveThinking { effort } => {
+ BedrockModelMode::AdaptiveThinking {
+ effort: default_effort,
+ } => {
+ let effort = request
+ .thinking_effort
+ .as_deref()
+ .and_then(|e| match e {
+ "low" => Some(bedrock::BedrockAdaptiveThinkingEffort::Low),
+ "medium" => Some(bedrock::BedrockAdaptiveThinkingEffort::Medium),
+ "high" => Some(bedrock::BedrockAdaptiveThinkingEffort::High),
+ "max" => Some(bedrock::BedrockAdaptiveThinkingEffort::Max),
+ _ => None,
+ })
+ .unwrap_or(default_effort);
Some(bedrock::Thinking::Adaptive { effort })
}
BedrockModelMode::Default => None,
@@ -1205,7 +1244,7 @@ pub fn map_to_language_model_completion_events(
{
tool_use.input_json.push_str(tool_output.input());
if let Ok(input) = serde_json::from_str::<serde_json::Value>(
- &partial_json_fixer::fix_json(&tool_use.input_json),
+ &fix_streamed_json(&tool_use.input_json),
) {
Some(Ok(LanguageModelCompletionEvent::ToolUse(
LanguageModelToolUse {
@@ -641,11 +641,11 @@ impl LanguageModel for CloudLanguageModel {
fn tool_input_format(&self) -> LanguageModelToolSchemaFormat {
match self.model.provider {
cloud_llm_client::LanguageModelProvider::Anthropic
- | cloud_llm_client::LanguageModelProvider::OpenAi
- | cloud_llm_client::LanguageModelProvider::XAi => {
+ | cloud_llm_client::LanguageModelProvider::OpenAi => {
LanguageModelToolSchemaFormat::JsonSchema
}
- cloud_llm_client::LanguageModelProvider::Google => {
+ cloud_llm_client::LanguageModelProvider::Google
+ | cloud_llm_client::LanguageModelProvider::XAi => {
LanguageModelToolSchemaFormat::JsonSchemaSubset
}
}
@@ -33,7 +33,7 @@ use ui::prelude::*;
use util::debug_panic;
use crate::provider::anthropic::{AnthropicEventMapper, into_anthropic};
-use crate::provider::util::parse_tool_arguments;
+use crate::provider::util::{fix_streamed_json, parse_tool_arguments};
const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("copilot_chat");
const PROVIDER_NAME: LanguageModelProviderName =
@@ -579,7 +579,7 @@ pub fn map_to_language_model_completion_events(
if !entry.id.is_empty() && !entry.name.is_empty() {
if let Ok(input) = serde_json::from_str::<serde_json::Value>(
- &partial_json_fixer::fix_json(&entry.arguments),
+ &fix_streamed_json(&entry.arguments),
) {
events.push(Ok(LanguageModelCompletionEvent::ToolUse(
LanguageModelToolUse {
@@ -22,7 +22,7 @@ use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*};
use ui_input::InputField;
use util::ResultExt;
-use crate::provider::util::parse_tool_arguments;
+use crate::provider::util::{fix_streamed_json, parse_tool_arguments};
const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("deepseek");
const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("DeepSeek");
@@ -476,7 +476,7 @@ impl DeepSeekEventMapper {
if !entry.id.is_empty() && !entry.name.is_empty() {
if let Ok(input) = serde_json::from_str::<serde_json::Value>(
- &partial_json_fixer::fix_json(&entry.arguments),
+ &fix_streamed_json(&entry.arguments),
) {
events.push(Ok(LanguageModelCompletionEvent::ToolUse(
LanguageModelToolUse {
@@ -22,7 +22,7 @@ use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*};
use ui_input::InputField;
use util::ResultExt;
-use crate::provider::util::parse_tool_arguments;
+use crate::provider::util::{fix_streamed_json, parse_tool_arguments};
const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("mistral");
const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("Mistral");
@@ -647,7 +647,7 @@ impl MistralEventMapper {
if !entry.id.is_empty() && !entry.name.is_empty() {
if let Ok(input) = serde_json::from_str::<serde_json::Value>(
- &partial_json_fixer::fix_json(&entry.arguments),
+ &fix_streamed_json(&entry.arguments),
) {
events.push(Ok(LanguageModelCompletionEvent::ToolUse(
LanguageModelToolUse {
@@ -14,8 +14,8 @@ use language_model::{
};
use menu;
use open_ai::responses::{
- ResponseFunctionCallItem, ResponseFunctionCallOutputItem, ResponseInputContent,
- ResponseInputItem, ResponseMessageItem,
+ ResponseFunctionCallItem, ResponseFunctionCallOutputContent, ResponseFunctionCallOutputItem,
+ ResponseInputContent, ResponseInputItem, ResponseMessageItem,
};
use open_ai::{
ImageUrl, Model, OPEN_AI_API_URL, ReasoningEffort, ResponseStreamEvent,
@@ -33,7 +33,7 @@ use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*};
use ui_input::InputField;
use util::ResultExt;
-use crate::provider::util::parse_tool_arguments;
+use crate::provider::util::{fix_streamed_json, parse_tool_arguments};
const PROVIDER_ID: LanguageModelProviderId = language_model::OPEN_AI_PROVIDER_ID;
const PROVIDER_NAME: LanguageModelProviderName = language_model::OPEN_AI_PROVIDER_NAME;
@@ -647,8 +647,16 @@ fn append_message_to_response_items(
ResponseFunctionCallOutputItem {
call_id: tool_result.tool_use_id.to_string(),
output: match tool_result.content {
- LanguageModelToolResultContent::Text(text) => text.to_string(),
- LanguageModelToolResultContent::Image(image) => image.to_base64_url(),
+ LanguageModelToolResultContent::Text(text) => {
+ ResponseFunctionCallOutputContent::Text(text.to_string())
+ }
+ LanguageModelToolResultContent::Image(image) => {
+ ResponseFunctionCallOutputContent::List(vec![
+ ResponseInputContent::Image {
+ image_url: image.to_base64_url(),
+ },
+ ])
+ }
},
},
));
@@ -828,7 +836,7 @@ impl OpenAiEventMapper {
if !entry.id.is_empty() && !entry.name.is_empty() {
if let Ok(input) = serde_json::from_str::<serde_json::Value>(
- &partial_json_fixer::fix_json(&entry.arguments),
+ &fix_streamed_json(&entry.arguments),
) {
events.push(Ok(LanguageModelCompletionEvent::ToolUse(
LanguageModelToolUse {
@@ -983,7 +991,7 @@ impl OpenAiResponseEventMapper {
if let Some(entry) = self.function_calls_by_item.get_mut(&item_id) {
entry.arguments.push_str(&delta);
if let Ok(input) = serde_json::from_str::<serde_json::Value>(
- &partial_json_fixer::fix_json(&entry.arguments),
+ &fix_streamed_json(&entry.arguments),
) {
return vec![Ok(LanguageModelCompletionEvent::ToolUse(
LanguageModelToolUse {
@@ -21,7 +21,7 @@ use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*};
use ui_input::InputField;
use util::ResultExt;
-use crate::provider::util::parse_tool_arguments;
+use crate::provider::util::{fix_streamed_json, parse_tool_arguments};
const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("openrouter");
const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("OpenRouter");
@@ -657,7 +657,7 @@ impl OpenRouterEventMapper {
if !entry.id.is_empty() && !entry.name.is_empty() {
if let Ok(input) = serde_json::from_str::<serde_json::Value>(
- &partial_json_fixer::fix_json(&entry.arguments),
+ &fix_streamed_json(&entry.arguments),
) {
events.push(Ok(LanguageModelCompletionEvent::ToolUse(
LanguageModelToolUse {
@@ -11,3 +11,99 @@ pub fn parse_tool_arguments(arguments: &str) -> Result<serde_json::Value, serde_
serde_json::Value::from_str(arguments)
}
}
+
+/// `partial_json_fixer::fix_json` converts a trailing `\` inside a string into `\\`
+/// (a literal backslash). When used for incremental parsing (comparing successive
+/// parses to extract deltas), this produces a spurious backslash character that
+/// doesn't exist in the final text, corrupting the output.
+///
+/// This function strips any trailing incomplete escape sequence before fixing,
+/// so each intermediate parse produces a true prefix of the final string value.
+///
+/// NOTE(review): only a dangling lone `\` is handled; a chunk boundary inside a
+/// `\uXXXX` escape (e.g. ending in `\u00`) is left as-is — the fixed JSON then
+/// fails to parse and that intermediate delta is simply skipped, which is safe.
+pub fn fix_streamed_json(partial_json: &str) -> String {
+    // Drop the dangling `\` first so the fixer cannot turn it into `\\`.
+    let json = strip_trailing_incomplete_escape(partial_json);
+    partial_json_fixer::fix_json(json)
+}
+
+/// Returns `json` with one trailing backslash removed when that backslash
+/// begins an unfinished escape sequence.
+///
+/// An odd-length run of trailing `\` bytes means the last one starts an escape
+/// whose second character has not arrived yet; an even-length run is a sequence
+/// of complete `\\` (literal backslash) escapes and is left untouched.
+fn strip_trailing_incomplete_escape(json: &str) -> &str {
+    let trailing_backslashes = json
+        .as_bytes()
+        .iter()
+        .rev()
+        .take_while(|&&b| b == b'\\')
+        .count();
+    if trailing_backslashes % 2 == 1 {
+        // `\` is single-byte ASCII, so this slice stays on a char boundary.
+        &json[..json.len() - 1]
+    } else {
+        json
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    // Each test parses the "fixed" partial JSON and asserts that the recovered
+    // string value is a true prefix of the final text — i.e. the fixer never
+    // introduces a spurious trailing backslash.
+
+    #[test]
+    fn test_fix_streamed_json_strips_incomplete_escape() {
+        // Trailing `\` inside a string — incomplete escape sequence
+        let fixed = fix_streamed_json(r#"{"text": "hello\"#);
+        let parsed: serde_json::Value = serde_json::from_str(&fixed).expect("valid json");
+        assert_eq!(parsed["text"], "hello");
+    }
+
+    #[test]
+    fn test_fix_streamed_json_preserves_complete_escape() {
+        // `\\` is a complete escape (literal backslash)
+        let fixed = fix_streamed_json(r#"{"text": "hello\\"#);
+        let parsed: serde_json::Value = serde_json::from_str(&fixed).expect("valid json");
+        assert_eq!(parsed["text"], "hello\\");
+    }
+
+    #[test]
+    fn test_fix_streamed_json_strips_escape_after_complete_escape() {
+        // `\\\` = complete `\\` (literal backslash) + incomplete `\`
+        let fixed = fix_streamed_json(r#"{"text": "hello\\\"#);
+        let parsed: serde_json::Value = serde_json::from_str(&fixed).expect("valid json");
+        assert_eq!(parsed["text"], "hello\\");
+    }
+
+    #[test]
+    fn test_fix_streamed_json_no_escape_at_end() {
+        let fixed = fix_streamed_json(r#"{"text": "hello"#);
+        let parsed: serde_json::Value = serde_json::from_str(&fixed).expect("valid json");
+        assert_eq!(parsed["text"], "hello");
+    }
+
+    #[test]
+    fn test_fix_streamed_json_newline_escape_boundary() {
+        // Simulates a stream boundary landing between `\` and `n`
+        let fixed = fix_streamed_json(r#"{"text": "line1\"#);
+        let parsed: serde_json::Value = serde_json::from_str(&fixed).expect("valid json");
+        assert_eq!(parsed["text"], "line1");
+
+        // Next chunk completes the escape
+        let fixed = fix_streamed_json(r#"{"text": "line1\nline2"#);
+        let parsed: serde_json::Value = serde_json::from_str(&fixed).expect("valid json");
+        assert_eq!(parsed["text"], "line1\nline2");
+    }
+
+    #[test]
+    fn test_fix_streamed_json_incremental_delta_correctness() {
+        // This is the actual scenario that causes the bug:
+        // chunk 1 ends mid-escape, chunk 2 completes it.
+        let chunk1 = r#"{"replacement_text": "fn foo() {\"#;
+        let fixed1 = fix_streamed_json(chunk1);
+        let parsed1: serde_json::Value = serde_json::from_str(&fixed1).expect("valid json");
+        let text1 = parsed1["replacement_text"].as_str().expect("string");
+        assert_eq!(text1, "fn foo() {");
+
+        let chunk2 = r#"{"replacement_text": "fn foo() {\n    return bar;\n}"}"#;
+        let fixed2 = fix_streamed_json(chunk2);
+        let parsed2: serde_json::Value = serde_json::from_str(&fixed2).expect("valid json");
+        let text2 = parsed2["replacement_text"].as_str().expect("string");
+        assert_eq!(text2, "fn foo() {\n    return bar;\n}");
+
+        // The delta should be the newline + rest, with no spurious backslash
+        let delta = &text2[text1.len()..];
+        assert_eq!(delta, "\n    return bar;\n}");
+    }
+}
@@ -23,7 +23,7 @@ impl BasedPyrightBanner {
this.have_basedpyright = true;
}
});
- let dismissed = Self::dismissed();
+ let dismissed = Self::dismissed(cx);
Self {
dismissed,
have_basedpyright: false,
@@ -209,20 +209,32 @@ impl HighlightsTreeView {
window: &mut Window,
cx: &mut Context<Self>,
) {
- let Some(editor) = active_item
- .filter(|item| item.item_id() != cx.entity_id())
- .and_then(|item| item.downcast::<Editor>())
- else {
- self.clear(cx);
- return;
+ let active_editor = match active_item {
+ Some(active_item) => {
+ if active_item.item_id() == cx.entity_id() {
+ return;
+ } else {
+ match active_item.downcast::<Editor>() {
+ Some(active_editor) => active_editor,
+ None => {
+ self.clear(cx);
+ return;
+ }
+ }
+ }
+ }
+ None => {
+ self.clear(cx);
+ return;
+ }
};
let is_different_editor = self
.editor
.as_ref()
- .is_none_or(|state| state.editor != editor);
+ .is_none_or(|state| state.editor != active_editor);
if is_different_editor {
- self.set_editor(editor, window, cx);
+ self.set_editor(active_editor, window, cx);
}
}
@@ -1,6 +1,6 @@
name = "JSONC"
grammar = "jsonc"
-path_suffixes = ["jsonc", "bun.lock", "devcontainer.json", "pyrightconfig.json", "tsconfig.json", "luaurc"]
+path_suffixes = ["jsonc", "bun.lock", "devcontainer.json", "pyrightconfig.json", "tsconfig.json", "luaurc", "swcrc", "babelrc", "eslintrc", "stylelintrc"]
line_comments = ["// "]
autoclose_before = ",]}"
brackets = [
@@ -47,6 +47,10 @@ util.workspace = true
libwebrtc.workspace = true
livekit.workspace = true
+[target.'cfg(target_os = "linux")'.dependencies]
+tokio = { workspace = true, features = ["time"] }
+webrtc-sys.workspace = true
+
[target.'cfg(any(target_os = "linux", target_os = "freebsd", target_os = "windows"))'.dependencies]
scap.workspace = true
@@ -35,15 +35,7 @@ fn main() {
cx.activate(true);
cx.on_action(quit);
cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]);
- cx.set_menus(vec![Menu {
- name: "Zed".into(),
- items: vec![MenuItem::Action {
- name: "Quit".into(),
- action: Box::new(Quit),
- os_action: None,
- checked: false,
- }],
- }]);
+ cx.set_menus([Menu::new("Zed").items([MenuItem::action("Quit", Quit)])]);
let livekit_url = std::env::var("LIVEKIT_URL").unwrap_or("http://localhost:7880".into());
let livekit_key = std::env::var("LIVEKIT_KEY").unwrap_or("devkey".into());
@@ -255,7 +247,7 @@ impl LivekitWindow {
} else {
let room = self.room.clone();
cx.spawn_in(window, async move |this, cx| {
- let (publication, stream) = room
+ let (publication, stream, _input_lag_us) = room
.publish_local_microphone_track("test_user".to_string(), false, cx)
.await
.unwrap();
@@ -67,6 +67,14 @@ pub enum Participant {
Remote(RemoteParticipant),
}
+/// SDK-independent rating of a call participant's network connection,
+/// mirroring the LiveKit SDK's connection-quality levels.
+/// `Ord` is derived from declaration order: `Excellent` < `Good` < `Poor` < `Lost`.
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
+pub enum ConnectionQuality {
+    Excellent,
+    Good,
+    Poor,
+    Lost,
+}
+
#[derive(Debug, Clone)]
pub enum TrackPublication {
Local(LocalTrackPublication),
@@ -179,6 +187,10 @@ pub enum RoomEvent {
ActiveSpeakersChanged {
speakers: Vec<Participant>,
},
+ ConnectionQualityChanged {
+ participant: Participant,
+ quality: ConnectionQuality,
+ },
ConnectionStateChanged(ConnectionState),
Connected {
participants_with_tracks: Vec<(RemoteParticipant, Vec<RemoteTrackPublication>)>,
@@ -7,13 +7,18 @@ use gpui_tokio::Tokio;
use log::info;
use playback::capture_local_video_track;
use settings::Settings;
+use std::sync::{Arc, atomic::AtomicU64};
+#[cfg(target_os = "linux")]
+mod linux;
mod playback;
use crate::{
- LocalTrack, Participant, RemoteTrack, RoomEvent, TrackPublication,
+ ConnectionQuality, LocalTrack, Participant, RemoteTrack, RoomEvent, TrackPublication,
livekit_client::playback::Speaker,
};
+pub use livekit::SessionStats;
+pub use livekit::webrtc::stats::RtcStats;
pub use playback::AudioStream;
pub(crate) use playback::{RemoteVideoFrame, play_remote_video_track};
@@ -107,8 +112,8 @@ impl Room {
user_name: String,
is_staff: bool,
cx: &mut AsyncApp,
- ) -> Result<(LocalTrackPublication, playback::AudioStream)> {
- let (track, stream) = self
+ ) -> Result<(LocalTrackPublication, playback::AudioStream, Arc<AtomicU64>)> {
+ let (track, stream, input_lag_us) = self
.playback
.capture_local_microphone_track(user_name, is_staff, &cx)?;
let publication = self
@@ -123,7 +128,7 @@ impl Room {
)
.await?;
- Ok((publication, stream))
+ Ok((publication, stream, input_lag_us))
}
pub async fn unpublish_local_track(
@@ -158,9 +163,32 @@ impl Room {
Err(anyhow!("Client version too old to play audio in call"))
}
}
+
+ pub async fn get_stats(&self) -> Result<livekit::SessionStats> {
+ self.room.get_stats().await.map_err(anyhow::Error::from)
+ }
+
+ /// Returns a `Task` that fetches room stats on the Tokio runtime.
+ ///
+ /// LiveKit's SDK is Tokio-based, so the stats fetch must run within
+ /// a Tokio context rather than on GPUI's smol-based background executor.
+ pub fn stats_task(&self, cx: &impl gpui::AppContext) -> Task<Result<livekit::SessionStats>> {
+ let inner = self.room.clone();
+ Tokio::spawn_result(cx, async move {
+ inner.get_stats().await.map_err(anyhow::Error::from)
+ })
+ }
}
impl LocalParticipant {
+ pub fn connection_quality(&self) -> ConnectionQuality {
+ connection_quality_from_livekit(self.0.connection_quality())
+ }
+
+ pub fn audio_level(&self) -> f32 {
+ self.0.audio_level()
+ }
+
pub async fn publish_screenshare_track(
&self,
source: &dyn ScreenCaptureSource,
@@ -205,6 +233,33 @@ impl LocalParticipant {
.map(LocalTrackPublication)
.context("unpublishing a track")
}
+
+ #[cfg(target_os = "linux")]
+ pub async fn publish_screenshare_track_wayland(
+ &self,
+ cx: &mut AsyncApp,
+ ) -> Result<(
+ LocalTrackPublication,
+ Box<dyn ScreenCaptureStream>,
+ futures::channel::oneshot::Receiver<()>,
+ )> {
+ let (track, stop_flag, feed_task, failure_rx) =
+ linux::start_wayland_desktop_capture(cx).await?;
+ let options = livekit::options::TrackPublishOptions {
+ source: livekit::track::TrackSource::Screenshare,
+ video_codec: livekit::options::VideoCodec::VP8,
+ ..Default::default()
+ };
+ let publication = self
+ .publish_track(livekit::track::LocalTrack::Video(track.0), options, cx)
+ .await?;
+
+ Ok((
+ publication,
+ Box::new(linux::WaylandScreenCaptureStream::new(stop_flag, feed_task)),
+ failure_rx,
+ ))
+ }
}
impl LocalTrackPublication {
@@ -234,6 +289,14 @@ impl LocalTrackPublication {
}
impl RemoteParticipant {
+ pub fn connection_quality(&self) -> ConnectionQuality {
+ connection_quality_from_livekit(self.0.connection_quality())
+ }
+
+ pub fn audio_level(&self) -> f32 {
+ self.0.audio_level()
+ }
+
pub fn identity(&self) -> ParticipantIdentity {
ParticipantIdentity(self.0.identity().0)
}
@@ -297,6 +360,31 @@ impl Participant {
}
}
}
+
+ pub fn connection_quality(&self) -> ConnectionQuality {
+ match self {
+ Participant::Local(local_participant) => local_participant.connection_quality(),
+ Participant::Remote(remote_participant) => remote_participant.connection_quality(),
+ }
+ }
+
+ pub fn audio_level(&self) -> f32 {
+ match self {
+ Participant::Local(local_participant) => local_participant.audio_level(),
+ Participant::Remote(remote_participant) => remote_participant.audio_level(),
+ }
+ }
+}
+
+/// Maps the LiveKit SDK's `ConnectionQuality` to this crate's SDK-independent
+/// `ConnectionQuality`.
+///
+/// Written without a catch-all arm so that a new upstream variant produces a
+/// compile error here instead of being silently dropped.
+fn connection_quality_from_livekit(
+    quality: livekit::prelude::ConnectionQuality,
+) -> ConnectionQuality {
+    match quality {
+        livekit::prelude::ConnectionQuality::Excellent => ConnectionQuality::Excellent,
+        livekit::prelude::ConnectionQuality::Good => ConnectionQuality::Good,
+        livekit::prelude::ConnectionQuality::Poor => ConnectionQuality::Poor,
+        livekit::prelude::ConnectionQuality::Lost => ConnectionQuality::Lost,
+    }
+}
fn participant_from_livekit(participant: livekit::participant::Participant) -> Participant {
@@ -474,6 +562,13 @@ fn room_event_from_livekit(event: livekit::RoomEvent) -> Option<RoomEvent> {
},
livekit::RoomEvent::Reconnecting => RoomEvent::Reconnecting,
livekit::RoomEvent::Reconnected => RoomEvent::Reconnected,
+ livekit::RoomEvent::ConnectionQualityChanged {
+ quality,
+ participant,
+ } => RoomEvent::ConnectionQualityChanged {
+ participant: participant_from_livekit(participant),
+ quality: connection_quality_from_livekit(quality),
+ },
_ => {
log::trace!("dropping livekit event: {:?}", event);
return None;
@@ -0,0 +1,203 @@
+use anyhow::Result;
+use futures::StreamExt as _;
+use futures::channel::oneshot;
+use gpui::{AsyncApp, ScreenCaptureStream};
+use livekit::track;
+use livekit::webrtc::{
+ prelude::NV12Buffer,
+ video_frame::{VideoFrame, VideoRotation},
+ video_source::{RtcVideoSource, VideoResolution, native::NativeVideoSource},
+};
+use std::sync::{
+ Arc,
+ atomic::{AtomicBool, AtomicU64, Ordering},
+};
+
+static NEXT_WAYLAND_SHARE_ID: AtomicU64 = AtomicU64::new(1);
+const PIPEWIRE_TIMEOUT_S: u64 = 30;
+
+/// A published Wayland screen-share stream. Dropping it (or setting the
+/// shared stop flag) ends the background capture loop.
+pub struct WaylandScreenCaptureStream {
+    // Process-unique id, reported through `SourceMetadata`.
+    id: u64,
+    // Shared with the capture loop; set to `true` on drop to stop it.
+    stop_flag: Arc<AtomicBool>,
+    // Keeps the frame-pumping task alive for the stream's lifetime.
+    _capture_task: gpui::Task<()>,
+}
+
+impl WaylandScreenCaptureStream {
+    pub fn new(stop_flag: Arc<AtomicBool>, capture_task: gpui::Task<()>) -> Self {
+        Self {
+            id: NEXT_WAYLAND_SHARE_ID.fetch_add(1, Ordering::Relaxed),
+            stop_flag,
+            _capture_task: capture_task,
+        }
+    }
+}
+
+impl ScreenCaptureStream for WaylandScreenCaptureStream {
+    fn metadata(&self) -> Result<gpui::SourceMetadata> {
+        // No label or real resolution is known up front, so a 1x1 placeholder
+        // is reported — NOTE(review): confirm consumers don't rely on it.
+        Ok(gpui::SourceMetadata {
+            id: self.id,
+            label: None,
+            is_main: None,
+            resolution: gpui::size(gpui::DevicePixels(1), gpui::DevicePixels(1)),
+        })
+    }
+}
+
+impl Drop for WaylandScreenCaptureStream {
+    fn drop(&mut self) {
+        // Release pairs with the Acquire load in the capture loop.
+        self.stop_flag.store(true, Ordering::Release);
+    }
+}
+
+/// Starts a portal/PipeWire-based Wayland desktop capture and returns:
+/// - the local video track to publish,
+/// - the shared stop flag (set to `true` to end the capture loop),
+/// - the background task pumping frames (must be kept alive for the share),
+/// - a oneshot receiver that fires if capture later hits a permanent error.
+///
+/// Errors if the capturer cannot be created, if no frame arrives within
+/// `PIPEWIRE_TIMEOUT_S`, or if the user cancels / the portal denies permission.
+pub(crate) async fn start_wayland_desktop_capture(
+    cx: &mut AsyncApp,
+) -> Result<(
+    crate::LocalVideoTrack,
+    Arc<AtomicBool>,
+    gpui::Task<()>,
+    oneshot::Receiver<()>,
+)> {
+    use futures::channel::mpsc;
+    use gpui::FutureExt as _;
+    use libwebrtc::desktop_capturer::{
+        CaptureError, DesktopCaptureSourceType, DesktopCapturer, DesktopCapturerOptions,
+        DesktopFrame,
+    };
+    use libwebrtc::native::yuv_helper::argb_to_nv12;
+    use std::time::Duration;
+    use webrtc_sys::webrtc::ffi as webrtc_ffi;
+
+    // Forward WebRTC's internal logs into ours (errors at error level, the
+    // rest at debug) to help diagnose portal/PipeWire failures.
+    fn webrtc_log_callback(message: String, severity: webrtc_ffi::LoggingSeverity) {
+        match severity {
+            webrtc_ffi::LoggingSeverity::Error => log::error!("[webrtc] {}", message.trim()),
+            _ => log::debug!("[webrtc] {}", message.trim()),
+        }
+    }
+
+    // NOTE(review): the sink is bound to this function's scope, so WebRTC log
+    // forwarding stops once setup returns — confirm whether it should live as
+    // long as the capture itself.
+    let _webrtc_log_sink = webrtc_ffi::new_log_sink(webrtc_log_callback);
+    log::debug!("Wayland desktop capture: WebRTC internal logging enabled");
+
+    let stop_flag = Arc::new(AtomicBool::new(false));
+    // Carries the lazily-created video source from the first captured frame
+    // back to this function; capacity 1 since only the first send matters.
+    let (mut video_source_tx, mut video_source_rx) = mpsc::channel::<NativeVideoSource>(1);
+    let (failure_tx, failure_rx) = oneshot::channel::<()>();
+
+    let mut options = DesktopCapturerOptions::new(DesktopCaptureSourceType::Generic);
+    options.set_include_cursor(true);
+    let mut capturer = DesktopCapturer::new(options).ok_or_else(|| {
+        anyhow::anyhow!(
+            "Failed to create desktop capturer. \
+             Check that xdg-desktop-portal is installed and running."
+        )
+    })?;
+
+    // Set by the frame callback on an unrecoverable failure; checked after
+    // the capture loop exits to notify the caller via `failure_tx`.
+    let permanent_error = Arc::new(AtomicBool::new(false));
+    let stop_cb = stop_flag.clone();
+    let permanent_error_cb = permanent_error.clone();
+    capturer.start_capture(None, {
+        // State owned by the per-frame callback.
+        let mut video_source: Option<NativeVideoSource> = None;
+        let mut current_width: u32 = 0;
+        let mut current_height: u32 = 0;
+        // Reused NV12 frame; its buffer is reallocated only on size changes.
+        let mut video_frame = VideoFrame {
+            rotation: VideoRotation::VideoRotation0,
+            buffer: NV12Buffer::new(1, 1),
+            timestamp_us: 0,
+        };
+
+        move |result: Result<DesktopFrame, CaptureError>| {
+            let frame = match result {
+                Ok(frame) => frame,
+                // Transient failures (e.g. a dropped frame) are just skipped.
+                Err(CaptureError::Temporary) => return,
+                Err(CaptureError::Permanent) => {
+                    log::error!("Wayland desktop capture encountered a permanent error");
+                    permanent_error_cb.store(true, Ordering::Release);
+                    stop_cb.store(true, Ordering::Release);
+                    return;
+                }
+            };
+
+            let width = frame.width() as u32;
+            let height = frame.height() as u32;
+            if width != current_width || height != current_height {
+                current_width = width;
+                current_height = height;
+                video_frame.buffer = NV12Buffer::new(width, height);
+            }
+
+            // Convert the captured ARGB frame into NV12 for WebRTC.
+            let (stride_y, stride_uv) = video_frame.buffer.strides();
+            let (data_y, data_uv) = video_frame.buffer.data_mut();
+            argb_to_nv12(
+                frame.data(),
+                frame.stride(),
+                data_y,
+                stride_y,
+                data_uv,
+                stride_uv,
+                width as i32,
+                height as i32,
+            );
+
+            if let Some(source) = &video_source {
+                source.capture_frame(&video_frame);
+            } else {
+                // First frame: create the video source now that the real
+                // resolution is known, and hand it back to the setup code.
+                let source = NativeVideoSource::new(VideoResolution { width, height }, true);
+                source.capture_frame(&video_frame);
+                video_source_tx.try_send(source.clone()).ok();
+                video_source = Some(source);
+            }
+        }
+    });
+
+    log::info!("Wayland desktop capture: starting capture loop");
+
+    // Drive the capturer at ~30 fps (33 ms period) on the Tokio runtime,
+    // because the LiveKit/WebRTC stack is Tokio-based.
+    let stop = stop_flag.clone();
+    let tokio_task = gpui_tokio::Tokio::spawn(cx, async move {
+        loop {
+            if stop.load(Ordering::Acquire) {
+                break;
+            }
+            capturer.capture_frame();
+            tokio::time::sleep(Duration::from_millis(33)).await;
+        }
+        drop(capturer);
+
+        if permanent_error.load(Ordering::Acquire) {
+            log::error!("Wayland screen capture ended due to a permanent capture error");
+            // The receiver may already be gone; ignore the send error.
+            let _ = failure_tx.send(());
+        }
+    });
+
+    // Adapt the Tokio join handle into a `gpui::Task` so the stream object
+    // can keep the loop alive by holding it.
+    let capture_task = cx.background_executor().spawn(async move {
+        if let Err(error) = tokio_task.await {
+            log::error!("Wayland capture task failed: {error}");
+        }
+    });
+
+    // Wait for the first frame (which carries the video source); the timeout
+    // covers both the portal permission dialog and PipeWire startup.
+    let executor = cx.background_executor().clone();
+    let video_source = video_source_rx
+        .next()
+        .with_timeout(Duration::from_secs(PIPEWIRE_TIMEOUT_S), &executor)
+        .await
+        .map_err(|_| {
+            // NOTE(review): these stores use Relaxed while others use Release
+            // (loop loads with Acquire). Works for a lone bool flag, but
+            // consider making the orderings consistent.
+            stop_flag.store(true, Ordering::Relaxed);
+            log::error!("Wayland desktop capture timed out.");
+            anyhow::anyhow!(
+                "Screen sharing timed out waiting for the first frame. \
+                 Check that xdg-desktop-portal and PipeWire are running, \
+                 and that your portal backend matches your compositor."
+            )
+        })?
+        .ok_or_else(|| {
+            // Channel closed without ever yielding a source — presumably the
+            // user canceled the dialog or the portal denied permission.
+            stop_flag.store(true, Ordering::Relaxed);
+            anyhow::anyhow!(
+                "Screen sharing was canceled or the portal denied permission. \
+                 You can try again from the screen share button."
+            )
+        })?;
+
+    let track = super::LocalVideoTrack(track::LocalVideoTrack::create_video_track(
+        "screen share",
+        RtcVideoSource::Native(video_source),
+    ));
+
+    Ok((track, stop_flag, capture_task, failure_rx))
+}
@@ -23,15 +23,22 @@ use livekit::webrtc::{
use log::info;
use parking_lot::Mutex;
use rodio::Source;
+use rodio::conversions::SampleTypeConverter;
+use rodio::source::{AutomaticGainControlSettings, LimitSettings};
use serde::{Deserialize, Serialize};
use settings::Settings;
use std::cell::RefCell;
use std::sync::Weak;
-use std::sync::atomic::{AtomicBool, AtomicI32, Ordering};
-use std::time::Duration;
+use std::sync::atomic::{AtomicBool, AtomicI32, AtomicU64, Ordering};
+use std::time::{Duration, Instant};
use std::{borrow::Cow, collections::VecDeque, sync::Arc};
use util::{ResultExt as _, maybe};
+/// An audio frame paired with the instant it was captured, used downstream to
+/// measure input-pipeline lag (capture time vs. LiveKit handoff time).
+struct TimestampedFrame {
+    frame: AudioFrame<'static>,
+    captured_at: Instant,
+}
+
mod source;
pub(crate) struct AudioStack {
@@ -162,7 +169,7 @@ impl AudioStack {
user_name: String,
is_staff: bool,
cx: &AsyncApp,
- ) -> Result<(crate::LocalAudioTrack, AudioStream)> {
+ ) -> Result<(crate::LocalAudioTrack, AudioStream, Arc<AtomicU64>)> {
let legacy_audio_compatible =
AudioSettings::try_read_global(cx, |setting| setting.legacy_audio_compatible)
.unwrap_or(true);
@@ -202,11 +209,15 @@ impl AudioStack {
let apm = self.apm.clone();
- let (frame_tx, mut frame_rx) = futures::channel::mpsc::channel(1);
+ let input_lag_us = Arc::new(AtomicU64::new(0));
+ let (frame_tx, mut frame_rx) = futures::channel::mpsc::channel::<TimestampedFrame>(1);
let transmit_task = self.executor.spawn_with_priority(Priority::RealtimeAudio, {
+ let input_lag_us = input_lag_us.clone();
async move {
- while let Some(frame) = frame_rx.next().await {
- source.capture_frame(&frame).await.log_err();
+ while let Some(timestamped) = frame_rx.next().await {
+ let lag = timestamped.captured_at.elapsed();
+ input_lag_us.store(lag.as_micros() as u64, Ordering::Relaxed);
+                    source.capture_frame(&timestamped.frame).await.log_err();
}
}
});
@@ -251,6 +262,7 @@ impl AudioStack {
AudioStream::Output {
_drop: Box::new(on_drop),
},
+ input_lag_us,
))
}
@@ -345,7 +357,7 @@ impl AudioStack {
async fn capture_input(
executor: BackgroundExecutor,
apm: Arc<Mutex<apm::AudioProcessingModule>>,
- frame_tx: Sender<AudioFrame<'static>>,
+ frame_tx: Sender<TimestampedFrame>,
sample_rate: u32,
num_channels: u32,
input_audio_device: Option<DeviceId>,
@@ -370,12 +382,21 @@ impl AudioStack {
let ten_ms_buffer_size =
(config.channels() as u32 * config.sample_rate() / 100) as usize;
let mut buf: Vec<i16> = Vec::with_capacity(ten_ms_buffer_size);
+ let mut rodio_effects = RodioEffectsAdaptor::new(buf.len())
+ .automatic_gain_control(AutomaticGainControlSettings {
+ target_level: 0.50,
+ attack_time: Duration::from_secs(1),
+ release_time: Duration::from_secs(0),
+ absolute_max_gain: 5.0,
+ })
+ .limit(LimitSettings::live_performance());
let stream = device
.build_input_stream_raw(
&config.config(),
config.sample_format(),
move |data, _: &_| {
+ let captured_at = Instant::now();
let data = crate::get_sample_data(config.sample_format(), data)
.log_err();
let Some(data) = data else {
@@ -400,6 +421,21 @@ impl AudioStack {
sample_rate,
)
.to_owned();
+
+ if audio::LIVE_SETTINGS
+ .auto_microphone_volume
+ .load(Ordering::Relaxed)
+ {
+ rodio_effects
+ .inner_mut()
+ .inner_mut()
+ .fill_buffer_with(&sampled);
+ sampled.clear();
+ sampled.extend(SampleTypeConverter::<_, i16>::new(
+ rodio_effects.by_ref(),
+ ));
+ }
+
apm.lock()
.process_stream(
&mut sampled,
@@ -408,12 +444,16 @@ impl AudioStack {
)
.log_err();
buf.clear();
+
frame_tx
- .try_send(AudioFrame {
- data: Cow::Owned(sampled),
- sample_rate,
- num_channels,
- samples_per_channel: sample_rate / 100,
+ .try_send(TimestampedFrame {
+ frame: AudioFrame {
+ data: Cow::Owned(sampled),
+ sample_rate,
+ num_channels,
+ samples_per_channel: sample_rate / 100,
+ },
+ captured_at,
})
.ok();
}
@@ -439,6 +479,69 @@ impl AudioStack {
}
}
+/// Adapts Rodio's effects library to our home-brewed audio pipeline. The
+/// alternative would be inlining Rodio's effects, which is problematic from a
+/// legal stance: we would have to make clear that code is not owned by
+/// zed-industries while it sat surrounded by zed-industries-owned code.
+///
+/// This adaptor does incur a slight performance penalty (copying into a
+/// pre-allocated vec and back) however the impact will be immeasurably low.
+///
+/// There is no latency impact.
+pub struct RodioEffectsAdaptor {
+    // f32 samples for the current 10 ms buffer, consumed via `Iterator`.
+    input: Vec<rodio::Sample>,
+    // Read cursor into `input`.
+    pos: usize,
+}
+
+impl RodioEffectsAdaptor {
+    // Terminology note: a normal audio frame is all samples for one moment in
+    // time (one for mono, two for stereo); here `samples_per_frame` follows
+    // this pipeline's existing usage and means a 10 ms buffer of samples.
+    fn new(samples_per_frame: usize) -> Self {
+        Self {
+            input: Vec::with_capacity(samples_per_frame),
+            pos: 0,
+        }
+    }
+
+    // Loads a buffer of i16 samples (converted to f32) and rewinds the cursor.
+    fn fill_buffer_with(&mut self, integer_samples: &[i16]) {
+        self.input.clear();
+        self.input.extend(SampleTypeConverter::<_, f32>::new(
+            integer_samples.iter().copied(),
+        ));
+        self.pos = 0;
+    }
+}
+
+impl Iterator for RodioEffectsAdaptor {
+    type Item = rodio::Sample;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        let sample = self.input.get(self.pos)?;
+        self.pos += 1;
+        Some(*sample)
+    }
+}
+
+impl rodio::Source for RodioEffectsAdaptor {
+    fn current_span_len(&self) -> Option<usize> {
+        None
+    }
+
+    // NOTE(review): channel count and sample rate are hardcoded to stereo /
+    // 48 kHz — confirm these always match the capture configuration feeding
+    // `fill_buffer_with`.
+    fn channels(&self) -> rodio::ChannelCount {
+        rodio::nz!(2)
+    }
+
+    fn sample_rate(&self) -> rodio::SampleRate {
+        rodio::nz!(48000)
+    }
+
+    fn total_duration(&self) -> Option<Duration> {
+        None
+    }
+}
+
#[derive(Serialize, Deserialize, Debug)]
pub struct Speaker {
pub name: String,
@@ -446,7 +549,7 @@ pub struct Speaker {
pub sends_legacy_audio: bool,
}
-fn send_to_livekit(mut frame_tx: Sender<AudioFrame<'static>>, mut microphone: impl Source) {
+fn send_to_livekit(mut frame_tx: Sender<TimestampedFrame>, mut microphone: impl Source) {
use cpal::Sample;
let sample_rate = microphone.sample_rate().get();
let num_channels = microphone.channels().get() as u32;
@@ -459,11 +562,14 @@ fn send_to_livekit(mut frame_tx: Sender<AudioFrame<'static>>, mut microphone: im
.map(|s| s.to_sample())
.collect();
- match frame_tx.try_send(AudioFrame {
- sample_rate,
- num_channels,
- samples_per_channel: sampled.len() as u32 / num_channels,
- data: Cow::Owned(sampled),
+ match frame_tx.try_send(TimestampedFrame {
+ frame: AudioFrame {
+ sample_rate,
+ num_channels,
+ samples_per_channel: sampled.len() as u32 / num_channels,
+ data: Cow::Owned(sampled),
+ },
+ captured_at: Instant::now(),
}) {
Ok(_) => {}
Err(err) => {
@@ -15,7 +15,7 @@ pub type LocalTrackPublication = publication::LocalTrackPublication;
pub type LocalParticipant = participant::LocalParticipant;
pub type Room = test::Room;
-pub use test::{ConnectionState, ParticipantIdentity, TrackSid};
+pub use test::{ConnectionState, ParticipantIdentity, RtcStats, SessionStats, TrackSid};
pub struct AudioStream {}
@@ -1,6 +1,6 @@
use crate::{
- AudioStream, LocalAudioTrack, LocalTrackPublication, LocalVideoTrack, Participant,
- ParticipantIdentity, RemoteTrack, RemoteTrackPublication, TrackSid,
+ AudioStream, ConnectionQuality, LocalAudioTrack, LocalTrackPublication, LocalVideoTrack,
+ Participant, ParticipantIdentity, RemoteTrack, RemoteTrackPublication, TrackSid,
test::{Room, WeakRoom},
};
use anyhow::Result;
@@ -8,6 +8,7 @@ use collections::HashMap;
use gpui::{
AsyncApp, DevicePixels, ScreenCaptureSource, ScreenCaptureStream, SourceMetadata, size,
};
+use std::sync::{Arc, atomic::AtomicU64};
#[derive(Clone, Debug)]
pub struct LocalParticipant {
@@ -28,9 +29,31 @@ impl Participant {
Participant::Remote(participant) => participant.identity.clone(),
}
}
+
+ pub fn connection_quality(&self) -> ConnectionQuality {
+ match self {
+ Participant::Local(p) => p.connection_quality(),
+ Participant::Remote(p) => p.connection_quality(),
+ }
+ }
+
+ pub fn audio_level(&self) -> f32 {
+ match self {
+ Participant::Local(p) => p.audio_level(),
+ Participant::Remote(p) => p.audio_level(),
+ }
+ }
}
impl LocalParticipant {
+ pub fn connection_quality(&self) -> ConnectionQuality {
+ ConnectionQuality::Excellent
+ }
+
+ pub fn audio_level(&self) -> f32 {
+ 0.0
+ }
+
pub async fn unpublish_track(&self, track: TrackSid, _cx: &AsyncApp) -> Result<()> {
self.room
.test_server()
@@ -41,7 +64,7 @@ impl LocalParticipant {
pub(crate) async fn publish_microphone_track(
&self,
_cx: &AsyncApp,
- ) -> Result<(LocalTrackPublication, AudioStream)> {
+ ) -> Result<(LocalTrackPublication, AudioStream, Arc<AtomicU64>)> {
let this = self.clone();
let server = this.room.test_server();
let sid = server
@@ -54,6 +77,7 @@ impl LocalParticipant {
sid,
},
AudioStream {},
+ Arc::new(AtomicU64::new(0)),
))
}
@@ -75,9 +99,42 @@ impl LocalParticipant {
Box::new(TestScreenCaptureStream {}),
))
}
+
+ #[cfg(target_os = "linux")]
+ pub async fn publish_screenshare_track_wayland(
+ &self,
+ _cx: &mut AsyncApp,
+ ) -> Result<(
+ LocalTrackPublication,
+ Box<dyn ScreenCaptureStream>,
+ futures::channel::oneshot::Receiver<()>,
+ )> {
+ let (_failure_tx, failure_rx) = futures::channel::oneshot::channel();
+ let this = self.clone();
+ let server = this.room.test_server();
+ let sid = server
+ .publish_video_track(this.room.token(), LocalVideoTrack {})
+ .await?;
+ Ok((
+ LocalTrackPublication {
+ room: self.room.downgrade(),
+ sid,
+ },
+ Box::new(TestWaylandScreenCaptureStream::new()),
+ failure_rx,
+ ))
+ }
}
impl RemoteParticipant {
+ pub fn connection_quality(&self) -> ConnectionQuality {
+ ConnectionQuality::Excellent
+ }
+
+ pub fn audio_level(&self) -> f32 {
+ 0.0
+ }
+
pub fn track_publications(&self) -> HashMap<TrackSid, RemoteTrackPublication> {
if let Some(room) = self.room.upgrade() {
let server = room.test_server();
@@ -134,3 +191,32 @@ impl ScreenCaptureStream for TestScreenCaptureStream {
})
}
}
+
+#[cfg(target_os = "linux")]
+static NEXT_TEST_WAYLAND_SHARE_ID: AtomicU64 = AtomicU64::new(1);
+
+/// Test double for the Wayland screen-share stream: carries only a unique id
+/// and reports placeholder metadata, mirroring the production stream's
+/// `ScreenCaptureStream` surface.
+#[cfg(target_os = "linux")]
+struct TestWaylandScreenCaptureStream {
+    id: u64,
+}
+
+#[cfg(target_os = "linux")]
+impl TestWaylandScreenCaptureStream {
+    fn new() -> Self {
+        Self {
+            id: NEXT_TEST_WAYLAND_SHARE_ID.fetch_add(1, std::sync::atomic::Ordering::Relaxed),
+        }
+    }
+}
+
+#[cfg(target_os = "linux")]
+impl ScreenCaptureStream for TestWaylandScreenCaptureStream {
+    fn metadata(&self) -> Result<SourceMetadata> {
+        // Matches the production stream: no label, 1x1 placeholder resolution.
+        Ok(SourceMetadata {
+            id: self.id,
+            is_main: None,
+            label: None,
+            resolution: size(DevicePixels(1), DevicePixels(1)),
+        })
+    }
+}
@@ -10,7 +10,7 @@ use parking_lot::Mutex;
use postage::{mpsc, sink::Sink};
use std::sync::{
Arc, Weak,
- atomic::{AtomicBool, Ordering::SeqCst},
+ atomic::{AtomicBool, AtomicU64, Ordering::SeqCst},
};
#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
@@ -40,6 +40,15 @@ pub enum ConnectionState {
Disconnected,
}
+/// Test stand-in for `livekit::SessionStats`; always empty in the fake client.
+#[derive(Clone, Debug, Default)]
+pub struct SessionStats {
+    pub publisher_stats: Vec<RtcStats>,
+    pub subscriber_stats: Vec<RtcStats>,
+}
+
+/// Uninhabited test stand-in for the SDK's RTC stats type — no value can be
+/// constructed, so the vecs above are necessarily empty.
+#[derive(Clone, Debug)]
+pub enum RtcStats {}
+
static SERVERS: Mutex<BTreeMap<String, Arc<TestServer>>> = Mutex::new(BTreeMap::new());
pub struct TestServer {
@@ -739,9 +748,17 @@ impl Room {
_track_name: String,
_is_staff: bool,
cx: &mut AsyncApp,
- ) -> Result<(LocalTrackPublication, AudioStream)> {
+ ) -> Result<(LocalTrackPublication, AudioStream, Arc<AtomicU64>)> {
self.local_participant().publish_microphone_track(cx).await
}
+
+ pub async fn get_stats(&self) -> Result<SessionStats> {
+ Ok(SessionStats::default())
+ }
+
+ pub fn stats_task(&self, _cx: &impl gpui::AppContext) -> gpui::Task<Result<SessionStats>> {
+ gpui::Task::ready(Ok(SessionStats::default()))
+ }
}
impl Drop for RoomState {
@@ -1271,18 +1271,23 @@ impl Element for MarkdownElement {
builder.table.start(alignments.clone());
let column_count = alignments.len();
+ builder.push_div(
+ div().flex().flex_col().items_start(),
+ range,
+ markdown_end,
+ );
builder.push_div(
div()
.id(("table", range.start))
+ .min_w_0()
.grid()
.grid_cols(column_count as u16)
.when(self.style.table_columns_min_size, |this| {
this.grid_cols_min_content(column_count as u16)
})
.when(!self.style.table_columns_min_size, |this| {
- this.grid_cols(column_count as u16)
+ this.grid_cols_max_content(column_count as u16)
})
- .w_full()
.mb_2()
.border(px(1.5))
.border_color(cx.theme().colors().border)
@@ -1430,6 +1435,7 @@ impl Element for MarkdownElement {
}
}
MarkdownTagEnd::Table => {
+ builder.pop_div();
builder.pop_div();
builder.table.end();
}
@@ -1441,6 +1447,7 @@ impl Element for MarkdownElement {
builder.table.end_row();
}
MarkdownTagEnd::TableCell => {
+ builder.replace_pending_checkbox(range);
builder.pop_div();
builder.table.end_cell();
}
@@ -1926,6 +1933,28 @@ impl MarkdownElementBuilder {
}
}
+    /// If the text buffered for the just-closed table cell is exactly a
+    /// Markdown task-list marker (`[x]`, `[X]`, or `[ ]`), discard that text
+    /// and push a read-only checkbox element in its place.
+    ///
+    /// Presumably the Markdown parser does not emit task-list events inside
+    /// tables, so the markers are detected manually here at `TableCell` end —
+    /// confirm against the parser's behavior. `source_range` is only used to
+    /// seed a unique element id.
+    fn replace_pending_checkbox(&mut self, source_range: &Range<usize>) {
+        let trimmed = self.pending_line.text.trim();
+        if trimmed == "[x]" || trimmed == "[X]" || trimmed == "[ ]" {
+            let checked = trimmed != "[ ]";
+            // Drop the literal marker text so it is not rendered as well.
+            self.pending_line = PendingLine::default();
+            let checkbox = Checkbox::new(
+                ElementId::Name(
+                    format!("table_checkbox_{}_{}", source_range.start, source_range.end).into(),
+                ),
+                if checked {
+                    ToggleState::Selected
+                } else {
+                    ToggleState::Unselected
+                },
+            )
+            .fill()
+            .visualization_only(true)
+            .into_any_element();
+            // Assumes a div is open for the current cell (unwrap panics
+            // otherwise) — the caller pops that div immediately afterwards.
+            self.div_stack.last_mut().unwrap().extend([checkbox]);
+        }
+    }
+
fn flush_text(&mut self) {
let line = mem::take(&mut self.pending_line);
if line.text.is_empty() {
@@ -2493,6 +2522,48 @@ mod tests {
assert_eq!(second_word, "b");
}
+ #[test]
+ fn test_table_checkbox_detection() {
+ let md = "| Done |\n|------|\n| [x] |\n| [ ] |";
+ let (events, _, _) = crate::parser::parse_markdown(md);
+
+ let mut in_table = false;
+ let mut cell_texts: Vec<String> = Vec::new();
+ let mut current_cell = String::new();
+
+ for (range, event) in &events {
+ match event {
+ MarkdownEvent::Start(MarkdownTag::Table(_)) => in_table = true,
+ MarkdownEvent::End(MarkdownTagEnd::Table) => in_table = false,
+ MarkdownEvent::Start(MarkdownTag::TableCell) => current_cell.clear(),
+ MarkdownEvent::End(MarkdownTagEnd::TableCell) => {
+ if in_table {
+ cell_texts.push(current_cell.clone());
+ }
+ }
+ MarkdownEvent::Text if in_table => {
+ current_cell.push_str(&md[range.clone()]);
+ }
+ _ => {}
+ }
+ }
+
+ let checkbox_cells: Vec<&String> = cell_texts
+ .iter()
+ .filter(|t| {
+ let trimmed = t.trim();
+ trimmed == "[x]" || trimmed == "[X]" || trimmed == "[ ]"
+ })
+ .collect();
+ assert_eq!(
+ checkbox_cells.len(),
+ 2,
+ "Expected 2 checkbox cells, got: {cell_texts:?}"
+ );
+ assert_eq!(checkbox_cells[0].trim(), "[x]");
+ assert_eq!(checkbox_cells[1].trim(), "[ ]");
+ }
+
#[gpui::test]
fn test_inline_code_word_selection_excludes_backticks(cx: &mut TestAppContext) {
// Test that double-clicking on inline code selects just the code content,
@@ -19,7 +19,6 @@ anyhow.workspace = true
async-recursion.workspace = true
collections.workspace = true
editor.workspace = true
-fs.workspace = true
gpui.workspace = true
html5ever.workspace = true
language.workspace = true
@@ -2776,6 +2776,35 @@ Some other content
);
}
+ #[gpui::test]
+ async fn test_table_with_checkboxes() {
+ let markdown = "\
+| Done | Task |
+|------|---------|
+| [x] | Fix bug |
+| [ ] | Add feature |";
+
+ let parsed = parse(markdown).await;
+ let table = match &parsed.children[0] {
+ ParsedMarkdownElement::Table(table) => table,
+ other => panic!("Expected table, got: {:?}", other),
+ };
+
+ let first_cell = &table.body[0].columns[0];
+ let first_cell_text = match &first_cell.children[0] {
+ MarkdownParagraphChunk::Text(t) => t.contents.to_string(),
+ other => panic!("Expected text chunk, got: {:?}", other),
+ };
+ assert_eq!(first_cell_text.trim(), "[x]");
+
+ let second_cell = &table.body[1].columns[0];
+ let second_cell_text = match &second_cell.children[0] {
+ MarkdownParagraphChunk::Text(t) => t.contents.to_string(),
+ other => panic!("Expected text chunk, got: {:?}", other),
+ };
+ assert_eq!(second_cell_text.trim(), "[ ]");
+ }
+
#[gpui::test]
async fn test_list_basic() {
let parsed = parse(
@@ -9,7 +9,6 @@ use crate::{
markdown_preview_view::MarkdownPreviewView,
};
use collections::HashMap;
-use fs::normalize_path;
use gpui::{
AbsoluteLength, Animation, AnimationExt, AnyElement, App, AppContext as _, Context, Div,
Element, ElementId, Entity, HighlightStyle, Hsla, ImageSource, InteractiveText, IntoElement,
@@ -25,6 +24,7 @@ use std::{
};
use theme::{ActiveTheme, SyntaxTheme, ThemeSettings};
use ui::{CopyButton, LinkPreview, ToggleState, prelude::*, tooltip_container};
+use util::normalize_path;
use workspace::{OpenOptions, OpenVisible, Workspace};
pub struct CheckboxClickedEvent {
@@ -698,16 +698,15 @@ fn render_markdown_table(parsed: &ParsedMarkdownTable, cx: &mut RenderContext) -
.when_some(parsed.caption.as_ref(), |this, caption| {
this.children(render_markdown_text(caption, cx))
})
- .border_1()
- .border_color(cx.border_color)
- .rounded_sm()
- .overflow_hidden()
.child(
div()
+ .rounded_sm()
+ .overflow_hidden()
+ .border_1()
+ .border_color(cx.border_color)
.min_w_0()
- .w_full()
.grid()
- .grid_cols(max_column_count as u16)
+ .grid_cols_max_content(max_column_count as u16)
.children(cells),
)
.into_any()
@@ -891,6 +890,24 @@ fn render_markdown_text(parsed_new: &MarkdownParagraph, cx: &mut RenderContext)
for parsed_region in parsed_new {
match parsed_region {
MarkdownParagraphChunk::Text(parsed) => {
+ let trimmed = parsed.contents.trim();
+ if trimmed == "[x]" || trimmed == "[X]" || trimmed == "[ ]" {
+ let checked = trimmed != "[ ]";
+ let element = div()
+ .child(MarkdownCheckbox::new(
+ cx.next_id(&parsed.source_range),
+ if checked {
+ ToggleState::Selected
+ } else {
+ ToggleState::Unselected
+ },
+ cx.clone(),
+ ))
+ .into_any();
+ any_element.push(element);
+ continue;
+ }
+
let element_id = cx.next_id(&parsed.source_range);
let highlights = gpui::combine_highlights(
@@ -5188,6 +5188,11 @@ impl MultiBufferSnapshot {
}
}
+ pub fn line_len_utf16(&self, row: MultiBufferRow) -> u32 {
+ self.clip_point_utf16(Unclipped(PointUtf16::new(row.0, u32::MAX)), Bias::Left)
+ .column
+ }
+
pub fn buffer_line_for_row(
&self,
row: MultiBufferRow,
@@ -2,7 +2,7 @@ use std::collections::HashSet;
use std::sync::OnceLock;
use std::sync::atomic::{AtomicUsize, Ordering};
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
use gpui::{App, EntityId, EventEmitter, Subscription};
use ui::{IconButtonShape, Tooltip, prelude::*};
use workspace::item::{ItemBufferKind, ItemEvent, ItemHandle};
@@ -35,10 +35,10 @@ impl MultibufferHint {
}
impl MultibufferHint {
- fn counter() -> &'static AtomicUsize {
+ fn counter(cx: &App) -> &'static AtomicUsize {
static SHOWN_COUNT: OnceLock<AtomicUsize> = OnceLock::new();
SHOWN_COUNT.get_or_init(|| {
- let value: usize = KEY_VALUE_STORE
+ let value: usize = KeyValueStore::global(cx)
.read_kvp(SHOWN_COUNT_KEY)
.ok()
.flatten()
@@ -49,19 +49,21 @@ impl MultibufferHint {
})
}
- fn shown_count() -> usize {
- Self::counter().load(Ordering::Relaxed)
+ fn shown_count(cx: &App) -> usize {
+ Self::counter(cx).load(Ordering::Relaxed)
}
fn increment_count(cx: &mut App) {
- Self::set_count(Self::shown_count() + 1, cx)
+ Self::set_count(Self::shown_count(cx) + 1, cx)
}
pub(crate) fn set_count(count: usize, cx: &mut App) {
- Self::counter().store(count, Ordering::Relaxed);
+ Self::counter(cx).store(count, Ordering::Relaxed);
- db::write_and_log(cx, move || {
- KEY_VALUE_STORE.write_kvp(SHOWN_COUNT_KEY.to_string(), format!("{}", count))
+ let kvp = KeyValueStore::global(cx);
+ db::write_and_log(cx, move || async move {
+ kvp.write_kvp(SHOWN_COUNT_KEY.to_string(), format!("{}", count))
+ .await
});
}
@@ -71,7 +73,7 @@ impl MultibufferHint {
/// Determines the toolbar location for this [`MultibufferHint`].
fn determine_toolbar_location(&mut self, cx: &mut Context<Self>) -> ToolbarItemLocation {
- if Self::shown_count() >= NUMBER_OF_HINTS {
+ if Self::shown_count(cx) >= NUMBER_OF_HINTS {
return ToolbarItemLocation::Hidden;
}
@@ -1,6 +1,6 @@
use crate::multibuffer_hint::MultibufferHint;
use client::{Client, UserStore, zed_urls};
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
use fs::Fs;
use gpui::{
Action, AnyElement, App, AppContext, AsyncWindowContext, Context, Entity, EventEmitter,
@@ -194,8 +194,10 @@ pub fn show_onboarding_view(app_state: Arc<AppState>, cx: &mut App) -> Task<anyh
cx.notify();
};
- db::write_and_log(cx, || {
- KEY_VALUE_STORE.write_kvp(FIRST_OPEN.to_string(), "false".to_string())
+ let kvp = KeyValueStore::global(cx);
+ db::write_and_log(cx, move || async move {
+ kvp.write_kvp(FIRST_OPEN.to_string(), "false".to_string())
+ .await
});
},
)
@@ -559,7 +561,7 @@ impl workspace::SerializableItem for Onboarding {
alive_items,
workspace_id,
"onboarding_pages",
- &persistence::ONBOARDING_PAGES,
+ &persistence::OnboardingPagesDb::global(cx),
cx,
)
}
@@ -572,10 +574,9 @@ impl workspace::SerializableItem for Onboarding {
window: &mut Window,
cx: &mut App,
) -> gpui::Task<gpui::Result<Entity<Self>>> {
+ let db = persistence::OnboardingPagesDb::global(cx);
window.spawn(cx, async move |cx| {
- if let Some(_) =
- persistence::ONBOARDING_PAGES.get_onboarding_page(item_id, workspace_id)?
- {
+ if let Some(_) = db.get_onboarding_page(item_id, workspace_id)? {
workspace.update(cx, |workspace, cx| Onboarding::new(workspace, cx))
} else {
Err(anyhow::anyhow!("No onboarding page to deserialize"))
@@ -593,11 +594,12 @@ impl workspace::SerializableItem for Onboarding {
) -> Option<gpui::Task<gpui::Result<()>>> {
let workspace_id = workspace.database_id()?;
- Some(cx.background_spawn(async move {
- persistence::ONBOARDING_PAGES
- .save_onboarding_page(item_id, workspace_id)
- .await
- }))
+ let db = persistence::OnboardingPagesDb::global(cx);
+ Some(
+ cx.background_spawn(
+ async move { db.save_onboarding_page(item_id, workspace_id).await },
+ ),
+ )
}
fn should_serialize(&self, event: &Self::Event) -> bool {
@@ -646,7 +648,7 @@ mod persistence {
];
}
- db::static_connection!(ONBOARDING_PAGES, OnboardingPagesDb, [WorkspaceDb]);
+ db::static_connection!(OnboardingPagesDb, [WorkspaceDb]);
impl OnboardingPagesDb {
query! {
@@ -55,7 +55,14 @@ pub struct ResponseFunctionCallItem {
#[derive(Debug, Serialize, Deserialize)]
pub struct ResponseFunctionCallOutputItem {
pub call_id: String,
- pub output: String,
+ pub output: ResponseFunctionCallOutputContent,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+#[serde(untagged)]
+pub enum ResponseFunctionCallOutputContent {
+ List(Vec<ResponseInputContent>),
+ Text(String),
}
#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -2,7 +2,7 @@ mod outline_panel_settings;
use anyhow::Context as _;
use collections::{BTreeSet, HashMap, HashSet, hash_map};
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
use editor::{
AnchorRangeExt, Bias, DisplayPoint, Editor, EditorEvent, ExcerptId, ExcerptRange,
MultiBufferSnapshot, RangeToAnchorExt, SelectionEffects,
@@ -693,16 +693,18 @@ impl OutlinePanel {
.ok()
.flatten()
{
- Some(serialization_key) => cx
- .background_spawn(async move { KEY_VALUE_STORE.read_kvp(&serialization_key) })
- .await
- .context("loading outline panel")
- .log_err()
- .flatten()
- .map(|panel| serde_json::from_str::<SerializedOutlinePanel>(&panel))
- .transpose()
- .log_err()
- .flatten(),
+ Some(serialization_key) => {
+ let kvp = cx.update(|_, cx| KeyValueStore::global(cx))?;
+ cx.background_spawn(async move { kvp.read_kvp(&serialization_key) })
+ .await
+ .context("loading outline panel")
+ .log_err()
+ .flatten()
+ .map(|panel| serde_json::from_str::<SerializedOutlinePanel>(&panel))
+ .transpose()
+ .log_err()
+ .flatten()
+ }
None => None,
};
@@ -958,14 +960,14 @@ impl OutlinePanel {
};
let width = self.width;
let active = Some(self.active);
+ let kvp = KeyValueStore::global(cx);
self.pending_serialization = cx.background_spawn(
async move {
- KEY_VALUE_STORE
- .write_kvp(
- serialization_key,
- serde_json::to_string(&SerializedOutlinePanel { width, active })?,
- )
- .await?;
+ kvp.write_kvp(
+ serialization_key,
+ serde_json::to_string(&SerializedOutlinePanel { width, active })?,
+ )
+ .await?;
anyhow::Ok(())
}
.log_err(),
@@ -1488,13 +1490,7 @@ impl OutlinePanel {
let context_menu = ContextMenu::build(window, cx, |menu, _, _| {
menu.context(self.focus_handle.clone())
.action(
- if cfg!(target_os = "macos") {
- "Reveal in Finder"
- } else if cfg!(target_os = "windows") {
- "Reveal in File Explorer"
- } else {
- "Reveal in File Manager"
- },
+ ui::utils::reveal_in_file_manager_label(false),
Box::new(RevealInFileManager),
)
.action("Open in Terminal", Box::new(OpenInTerminal))
@@ -788,6 +788,12 @@ impl<D: PickerDelegate> Picker<D> {
this.handle_click(ix, event.modifiers.platform, window, cx)
}),
)
+ .on_hover(cx.listener(move |this, hovered: &bool, window, cx| {
+ if *hovered {
+ this.set_selected_index(ix, None, false, window, cx);
+ cx.notify();
+ }
+ }))
.children(self.delegate.render_match(
ix,
ix == self.delegate.selected_index(),
@@ -32,7 +32,6 @@ pub struct PlatformTitleBar {
should_move: bool,
system_window_tabs: Entity<SystemWindowTabs>,
workspace_sidebar_open: bool,
- sidebar_has_notifications: bool,
}
impl PlatformTitleBar {
@@ -47,7 +46,6 @@ impl PlatformTitleBar {
should_move: false,
system_window_tabs,
workspace_sidebar_open: false,
- sidebar_has_notifications: false,
}
}
@@ -83,19 +81,6 @@ impl PlatformTitleBar {
cx.notify();
}
- pub fn sidebar_has_notifications(&self) -> bool {
- self.sidebar_has_notifications
- }
-
- pub fn set_sidebar_has_notifications(
- &mut self,
- has_notifications: bool,
- cx: &mut Context<Self>,
- ) {
- self.sidebar_has_notifications = has_notifications;
- cx.notify();
- }
-
pub fn is_multi_workspace_enabled(cx: &App) -> bool {
cx.has_flag::<AgentV2FeatureFlag>() && !DisableAiSettings::get_global(cx).disable_ai
}
@@ -23,6 +23,7 @@ pub struct RegistryAgentMetadata {
pub description: SharedString,
pub version: SharedString,
pub repository: Option<SharedString>,
+ pub website: Option<SharedString>,
pub icon_path: Option<SharedString>,
}
@@ -75,6 +76,10 @@ impl RegistryAgent {
self.metadata().repository.as_ref()
}
+ pub fn website(&self) -> Option<&SharedString> {
+ self.metadata().website.as_ref()
+ }
+
pub fn icon_path(&self) -> Option<&SharedString> {
self.metadata().icon_path.as_ref()
}
@@ -369,6 +374,7 @@ async fn build_registry_agents(
description: entry.description.into(),
version: entry.version.into(),
repository: entry.repository.map(Into::into),
+ website: entry.website.map(Into::into),
icon_path,
};
@@ -568,6 +574,8 @@ struct RegistryEntry {
#[serde(default)]
repository: Option<String>,
#[serde(default)]
+ website: Option<String>,
+ #[serde(default)]
icon: Option<String>,
distribution: RegistryDistribution,
}
@@ -1374,13 +1374,8 @@ impl ExternalAgentServer for LocalRegistryNpxAgent {
.await
.unwrap_or_default();
- let mut exec_args = Vec::new();
- exec_args.push("--yes".to_string());
- exec_args.push(package.to_string());
- if !args.is_empty() {
- exec_args.push("--".to_string());
- exec_args.extend(args);
- }
+ let mut exec_args = vec!["--yes".to_string(), "--".to_string(), package.to_string()];
+ exec_args.extend(args);
let npm_command = node_runtime
.npm_command(
@@ -5755,6 +5755,31 @@ impl Repository {
})
}
+ /// If this is a linked worktree (*NOT* the main checkout of a repository),
+ /// returns the path of the linked worktree.
+ ///
+ /// Returns None if this is the main checkout.
+ pub fn linked_worktree_path(&self) -> Option<&Arc<Path>> {
+ if self.work_directory_abs_path != self.original_repo_abs_path {
+ Some(&self.work_directory_abs_path)
+ } else {
+ None
+ }
+ }
+
+ pub fn path_for_new_linked_worktree(
+ &self,
+ branch_name: &str,
+ worktree_directory_setting: &str,
+ ) -> Result<PathBuf> {
+ let original_repo = self.original_repo_abs_path.clone();
+ let project_name = original_repo
+ .file_name()
+ .ok_or_else(|| anyhow!("git repo must have a directory name"))?;
+ let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?;
+ Ok(directory.join(branch_name).join(project_name))
+ }
+
pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
let id = self.id;
self.send_job(None, move |repo, _| async move {
@@ -5784,25 +5809,25 @@ impl Repository {
pub fn create_worktree(
&mut self,
- name: String,
- directory: PathBuf,
+ branch_name: String,
+ path: PathBuf,
commit: Option<String>,
) -> oneshot::Receiver<Result<()>> {
let id = self.id;
self.send_job(
- Some("git worktree add".into()),
+ Some(format!("git worktree add: {}", branch_name).into()),
move |repo, _cx| async move {
match repo {
RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
- backend.create_worktree(name, directory, commit).await
+ backend.create_worktree(branch_name, path, commit).await
}
RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
client
.request(proto::GitCreateWorktree {
project_id: project_id.0,
repository_id: id.to_proto(),
- name,
- directory: directory.to_string_lossy().to_string(),
+ name: branch_name,
+ directory: path.to_string_lossy().to_string(),
commit,
})
.await?;
@@ -6716,6 +6741,120 @@ impl Repository {
}
}
+/// If `path` is a git linked worktree checkout, resolves it to the main
+/// repository's working directory path. Returns `None` if `path` is a normal
+/// repository, not a git repo, or if resolution fails.
+///
+/// Resolution works by:
+/// 1. Reading the `.git` file to get the `gitdir:` pointer
+/// 2. Following that to the worktree-specific git directory
+/// 3. Reading the `commondir` file to find the shared `.git` directory
+/// 4. Deriving the main repo's working directory from the common dir
+pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option<PathBuf> {
+ let dot_git = path.join(".git");
+ let metadata = fs.metadata(&dot_git).await.ok()??;
+ if metadata.is_dir {
+ return None; // Normal repo, not a linked worktree
+ }
+ // It's a .git file — parse the gitdir: pointer
+ let content = fs.load(&dot_git).await.ok()?;
+ let gitdir_rel = content.strip_prefix("gitdir:")?.trim();
+ let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?;
+ // Read commondir to find the main .git directory
+ let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?;
+ let common_dir = fs
+ .canonicalize(&gitdir_abs.join(commondir_content.trim()))
+ .await
+ .ok()?;
+ Some(git::repository::original_repo_path_from_common_dir(
+ &common_dir,
+ ))
+}
+
+/// Validates that the resolved worktree directory is acceptable:
+/// - The setting must not be an absolute path.
+/// - The resolved path must be either a subdirectory of the working
+/// directory or a subdirectory of its parent (i.e., a sibling).
+///
+/// Returns `Ok(resolved_path)` or an error with a user-facing message.
+pub fn worktrees_directory_for_repo(
+ original_repo_abs_path: &Path,
+ worktree_directory_setting: &str,
+) -> Result<PathBuf> {
+ // Check the original setting before trimming, since a path like "///"
+ // is absolute but becomes "" after stripping trailing separators.
+ // Also check for leading `/` or `\` explicitly, because on Windows
+ // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
+ // would slip through even though it's clearly not a relative path.
+ if Path::new(worktree_directory_setting).is_absolute()
+ || worktree_directory_setting.starts_with('/')
+ || worktree_directory_setting.starts_with('\\')
+ {
+ anyhow::bail!(
+ "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
+ );
+ }
+
+ if worktree_directory_setting.is_empty() {
+ anyhow::bail!("git.worktree_directory must not be empty");
+ }
+
+ let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
+ if trimmed == ".." {
+ anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
+ }
+
+ let joined = original_repo_abs_path.join(trimmed);
+ let resolved = util::normalize_path(&joined);
+ let resolved = if resolved.starts_with(original_repo_abs_path) {
+ resolved
+ } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() {
+ resolved.join(repo_dir_name)
+ } else {
+ resolved
+ };
+
+ let parent = original_repo_abs_path
+ .parent()
+ .unwrap_or(original_repo_abs_path);
+
+ if !resolved.starts_with(parent) {
+ anyhow::bail!(
+ "git.worktree_directory resolved to {resolved:?}, which is outside \
+ the project root and its parent directory. It must resolve to a \
+ subdirectory of {original_repo_abs_path:?} or a sibling of it."
+ );
+ }
+
+ Ok(resolved)
+}
+
+/// Returns a short name for a linked worktree suitable for UI display
+///
+/// Uses the main worktree path to come up with a short name that disambiguates
+/// the linked worktree from the main worktree.
+pub fn linked_worktree_short_name(
+ main_worktree_path: &Path,
+ linked_worktree_path: &Path,
+) -> Option<SharedString> {
+ if main_worktree_path == linked_worktree_path {
+ return None;
+ }
+
+ let project_name = main_worktree_path.file_name()?.to_str()?;
+ let directory_name = linked_worktree_path.file_name()?.to_str()?;
+ let name = if directory_name != project_name {
+ directory_name.to_string()
+ } else {
+ linked_worktree_path
+ .parent()?
+ .file_name()?
+ .to_str()?
+ .to_string()
+ };
+ Some(name.into())
+}
+
fn get_permalink_in_rust_registry_src(
provider_registry: Arc<GitHostingProviderRegistry>,
path: PathBuf,
@@ -6879,7 +7018,11 @@ fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
proto::Worktree {
path: worktree.path.to_string_lossy().to_string(),
- ref_name: worktree.ref_name.to_string(),
+ ref_name: worktree
+ .ref_name
+ .as_ref()
+ .map(|s| s.to_string())
+ .unwrap_or_default(),
sha: worktree.sha.to_string(),
}
}
@@ -6887,7 +7030,7 @@ fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
git::repository::Worktree {
path: PathBuf::from(proto.path.clone()),
- ref_name: proto.ref_name.clone().into(),
+ ref_name: Some(SharedString::from(&proto.ref_name)),
sha: proto.sha.clone().into(),
}
}
@@ -2636,11 +2636,10 @@ impl LspCommand for GetCodeActions {
relevant_diagnostics.push(entry.to_lsp_diagnostic_stub()?);
}
- let supported =
- Self::supported_code_action_kinds(language_server.adapter_server_capabilities());
-
let only = if let Some(requested) = &self.kinds {
- if let Some(supported_kinds) = supported {
+ if let Some(supported_kinds) =
+ Self::supported_code_action_kinds(language_server.adapter_server_capabilities())
+ {
let filtered = requested
.iter()
.filter(|requested_kind| {
@@ -2655,7 +2654,7 @@ impl LspCommand for GetCodeActions {
Some(requested.clone())
}
} else {
- supported
+ None
};
Ok(lsp::CodeActionParams {
@@ -33,7 +33,7 @@ pub mod search_history;
pub mod yarn;
use dap::inline_value::{InlineValueLocation, VariableLookupKind, VariableScope};
-use itertools::Either;
+use itertools::{Either, Itertools};
use crate::{
git_store::GitStore,
@@ -47,6 +47,7 @@ pub use agent_server_store::{AgentId, AgentServerStore, AgentServersUpdated, Ext
pub use git_store::{
ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate,
git_traversal::{ChildEntriesGitIter, GitEntry, GitEntryRef, GitTraversal},
+ linked_worktree_short_name, worktrees_directory_for_repo,
};
pub use manifest_tree::ManifestTree;
pub use project_search::{Search, SearchResults};
@@ -133,6 +134,7 @@ use text::{Anchor, BufferId, OffsetRangeExt, Point, Rope};
use toolchain_store::EmptyToolchainStore;
use util::{
ResultExt as _, maybe,
+ path_list::PathList,
paths::{PathStyle, SanitizedPath, is_absolute},
rel_path::RelPath,
};
@@ -304,7 +306,7 @@ enum ProjectClientState {
/// Multi-player mode but still a local project.
Shared { remote_id: u64 },
/// Multi-player mode but working on a remote project.
- Remote {
+ Collab {
sharing_has_stopped: bool,
capability: Capability,
remote_id: u64,
@@ -1813,7 +1815,7 @@ impl Project {
client_subscriptions: Default::default(),
_subscriptions: vec![cx.on_release(Self::release)],
collab_client: client.clone(),
- client_state: ProjectClientState::Remote {
+ client_state: ProjectClientState::Collab {
sharing_has_stopped: false,
capability: Capability::ReadWrite,
remote_id,
@@ -1931,7 +1933,7 @@ impl Project {
ProjectClientState::Shared { .. } => {
let _ = self.unshare_internal(cx);
}
- ProjectClientState::Remote { remote_id, .. } => {
+ ProjectClientState::Collab { remote_id, .. } => {
let _ = self.collab_client.send(proto::LeaveProject {
project_id: *remote_id,
});
@@ -2157,7 +2159,7 @@ impl Project {
match self.client_state {
ProjectClientState::Local => None,
ProjectClientState::Shared { remote_id, .. }
- | ProjectClientState::Remote { remote_id, .. } => Some(remote_id),
+ | ProjectClientState::Collab { remote_id, .. } => Some(remote_id),
}
}
@@ -2211,7 +2213,7 @@ impl Project {
#[inline]
pub fn replica_id(&self) -> ReplicaId {
match self.client_state {
- ProjectClientState::Remote { replica_id, .. } => replica_id,
+ ProjectClientState::Collab { replica_id, .. } => replica_id,
_ => {
if self.remote_client.is_some() {
ReplicaId::REMOTE_SERVER
@@ -2285,6 +2287,32 @@ impl Project {
self.worktree_store.read(cx).visible_worktrees(cx)
}
+ pub fn default_path_list(&self, cx: &App) -> PathList {
+ let worktree_roots = self
+ .visible_worktrees(cx)
+ .sorted_by(|left, right| {
+ left.read(cx)
+ .is_single_file()
+ .cmp(&right.read(cx).is_single_file())
+ })
+ .filter_map(|worktree| {
+ let worktree = worktree.read(cx);
+ let path = worktree.abs_path();
+ if worktree.is_single_file() {
+ Some(path.parent()?.to_path_buf())
+ } else {
+ Some(path.to_path_buf())
+ }
+ })
+ .collect::<Vec<_>>();
+
+ if worktree_roots.is_empty() {
+ PathList::new(&[paths::home_dir().as_path()])
+ } else {
+ PathList::new(&worktree_roots)
+ }
+ }
+
#[inline]
pub fn worktree_for_root_name(&self, root_name: &str, cx: &App) -> Option<Entity<Worktree>> {
self.visible_worktrees(cx)
@@ -2725,7 +2753,7 @@ impl Project {
} else {
Capability::ReadOnly
};
- if let ProjectClientState::Remote { capability, .. } = &mut self.client_state {
+ if let ProjectClientState::Collab { capability, .. } = &mut self.client_state {
if *capability == new_capability {
return;
}
@@ -2738,7 +2766,7 @@ impl Project {
}
fn disconnected_from_host_internal(&mut self, cx: &mut App) {
- if let ProjectClientState::Remote {
+ if let ProjectClientState::Collab {
sharing_has_stopped,
..
} = &mut self.client_state
@@ -2765,7 +2793,7 @@ impl Project {
#[inline]
pub fn is_disconnected(&self, cx: &App) -> bool {
match &self.client_state {
- ProjectClientState::Remote {
+ ProjectClientState::Collab {
sharing_has_stopped,
..
} => *sharing_has_stopped,
@@ -2787,7 +2815,7 @@ impl Project {
#[inline]
pub fn capability(&self) -> Capability {
match &self.client_state {
- ProjectClientState::Remote { capability, .. } => *capability,
+ ProjectClientState::Collab { capability, .. } => *capability,
ProjectClientState::Shared { .. } | ProjectClientState::Local => Capability::ReadWrite,
}
}
@@ -2803,7 +2831,7 @@ impl Project {
ProjectClientState::Local | ProjectClientState::Shared { .. } => {
self.remote_client.is_none()
}
- ProjectClientState::Remote { .. } => false,
+ ProjectClientState::Collab { .. } => false,
}
}
@@ -2814,7 +2842,7 @@ impl Project {
ProjectClientState::Local | ProjectClientState::Shared { .. } => {
self.remote_client.is_some()
}
- ProjectClientState::Remote { .. } => false,
+ ProjectClientState::Collab { .. } => false,
}
}
@@ -2823,7 +2851,7 @@ impl Project {
pub fn is_via_collab(&self) -> bool {
match &self.client_state {
ProjectClientState::Local | ProjectClientState::Shared { .. } => false,
- ProjectClientState::Remote { .. } => true,
+ ProjectClientState::Collab { .. } => true,
}
}
@@ -4496,7 +4524,7 @@ impl Project {
match &self.client_state {
ProjectClientState::Shared { .. } => true,
ProjectClientState::Local => false,
- ProjectClientState::Remote { .. } => true,
+ ProjectClientState::Collab { .. } => true,
}
}
@@ -5621,7 +5649,7 @@ impl Project {
fn synchronize_remote_buffers(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
let project_id = match self.client_state {
- ProjectClientState::Remote {
+ ProjectClientState::Collab {
sharing_has_stopped,
remote_id,
..
@@ -164,6 +164,11 @@ impl Search {
let buffer = handle.read(cx);
if !buffers.is_searchable(&buffer.remote_id()) {
continue;
+ } else if buffer
+ .file()
+ .is_some_and(|file| file.disk_state().is_deleted())
+ {
+ continue;
} else if let Some(entry_id) = buffer.entry_id(cx) {
open_buffers.insert(entry_id);
} else {
@@ -586,6 +591,9 @@ impl Search {
.filter(|buffer| {
let b = buffer.read(cx);
if let Some(file) = b.file() {
+ if file.disk_state().is_deleted() {
+ return false;
+ }
if !search_query.match_path(file.path()) {
return false;
}
@@ -19,12 +19,19 @@ pub enum QueryInsertionBehavior {
#[derive(Default, Debug, Clone, PartialEq, Eq, Hash)]
pub struct SearchHistoryCursor {
selection: Option<usize>,
+ draft: Option<String>,
}
impl SearchHistoryCursor {
- /// Resets the selection to `None`.
+ /// Resets the selection to `None` and clears the draft.
pub fn reset(&mut self) {
self.selection = None;
+ self.draft = None;
+ }
+
+ /// Takes the stored draft query, if any.
+ pub fn take_draft(&mut self) -> Option<String> {
+ self.draft.take()
}
}
@@ -45,6 +52,8 @@ impl SearchHistory {
}
pub fn add(&mut self, cursor: &mut SearchHistoryCursor, search_string: String) {
+ cursor.draft = None;
+
if self.insertion_behavior == QueryInsertionBehavior::ReplacePreviousIfContains
&& let Some(previously_searched) = self.history.back_mut()
&& search_string.contains(previously_searched.as_str())
@@ -81,7 +90,23 @@ impl SearchHistory {
/// Get the previous history entry using the given `SearchHistoryCursor`.
/// Uses the last element in the history when there is no cursor.
- pub fn previous(&mut self, cursor: &mut SearchHistoryCursor) -> Option<&str> {
+ ///
+ /// `current_query` is the current text in the search editor. If it differs
+ /// from the history entry at the cursor position (or if the cursor has no
+ /// selection), it is saved as a draft so it can be restored later.
+ pub fn previous(
+ &mut self,
+ cursor: &mut SearchHistoryCursor,
+ current_query: &str,
+ ) -> Option<&str> {
+ let matches_history = cursor
+ .selection
+ .and_then(|i| self.history.get(i))
+ .is_some_and(|entry| entry == current_query);
+ if !matches_history {
+ cursor.draft = Some(current_query.to_string());
+ }
+
let prev_index = match cursor.selection {
Some(index) => index.checked_sub(1)?,
None => self.history.len().checked_sub(1)?,
@@ -3,7 +3,7 @@ mod go_locator {
use dap::{DapLocator, adapters::DebugAdapterName};
use gpui::TestAppContext;
use project::debugger::locators::go::{DelveLaunchRequest, GoLocator};
- use task::{HideStrategy, RevealStrategy, RevealTarget, Shell, TaskTemplate};
+ use task::{HideStrategy, RevealStrategy, RevealTarget, SaveStrategy, Shell, TaskTemplate};
#[gpui::test]
async fn test_create_scenario_for_go_build(_: &mut TestAppContext) {
let locator = GoLocator;
@@ -22,6 +22,7 @@ mod go_locator {
tags: vec![],
show_summary: true,
show_command: true,
+ save: SaveStrategy::default(),
};
let scenario = locator
@@ -49,6 +50,7 @@ mod go_locator {
tags: vec![],
show_summary: true,
show_command: true,
+ save: SaveStrategy::default(),
};
let scenario = locator
@@ -187,6 +189,7 @@ mod go_locator {
tags: vec![],
show_summary: true,
show_command: true,
+ save: SaveStrategy::default(),
};
let scenario = locator
@@ -221,6 +224,7 @@ mod python_locator {
shell: task::Shell::System,
show_summary: false,
show_command: false,
+ save: task::SaveStrategy::default(),
};
let expected_scenario = DebugScenario {
@@ -1176,14 +1176,13 @@ mod git_traversal {
}
mod git_worktrees {
- use std::path::PathBuf;
-
use fs::FakeFs;
use gpui::TestAppContext;
+ use project::worktrees_directory_for_repo;
use serde_json::json;
use settings::SettingsStore;
+ use std::path::{Path, PathBuf};
use util::path;
-
fn init_test(cx: &mut gpui::TestAppContext) {
zlog::init_test();
@@ -1193,6 +1192,48 @@ mod git_worktrees {
});
}
+ #[test]
+ fn test_validate_worktree_directory() {
+ let work_dir = Path::new("/code/my-project");
+
+ // Valid: sibling
+ assert!(worktrees_directory_for_repo(work_dir, "../worktrees").is_ok());
+
+ // Valid: subdirectory
+ assert!(worktrees_directory_for_repo(work_dir, ".git/zed-worktrees").is_ok());
+ assert!(worktrees_directory_for_repo(work_dir, "my-worktrees").is_ok());
+
+ // Invalid: just ".." would resolve back to the working directory itself
+ let err = worktrees_directory_for_repo(work_dir, "..").unwrap_err();
+ assert!(err.to_string().contains("must not be \"..\""));
+
+ // Invalid: ".." with trailing separators
+ let err = worktrees_directory_for_repo(work_dir, "..\\").unwrap_err();
+ assert!(err.to_string().contains("must not be \"..\""));
+ let err = worktrees_directory_for_repo(work_dir, "../").unwrap_err();
+ assert!(err.to_string().contains("must not be \"..\""));
+
+ // Invalid: empty string would resolve to the working directory itself
+ let err = worktrees_directory_for_repo(work_dir, "").unwrap_err();
+ assert!(err.to_string().contains("must not be empty"));
+
+ // Invalid: absolute path
+ let err = worktrees_directory_for_repo(work_dir, "/tmp/worktrees").unwrap_err();
+ assert!(err.to_string().contains("relative path"));
+
+ // Invalid: "/" is absolute on Unix
+ let err = worktrees_directory_for_repo(work_dir, "/").unwrap_err();
+ assert!(err.to_string().contains("relative path"));
+
+ // Invalid: "///" is absolute
+ let err = worktrees_directory_for_repo(work_dir, "///").unwrap_err();
+ assert!(err.to_string().contains("relative path"));
+
+ // Invalid: escapes too far up
+ let err = worktrees_directory_for_repo(work_dir, "../../other-project/wt").unwrap_err();
+ assert!(err.to_string().contains("outside"));
+ }
+
#[gpui::test]
async fn test_git_worktrees_list_and_create(cx: &mut TestAppContext) {
init_test(cx);
@@ -1221,12 +1262,13 @@ mod git_worktrees {
assert_eq!(worktrees.len(), 1);
assert_eq!(worktrees[0].path, PathBuf::from(path!("/root")));
- let worktree_directory = PathBuf::from(path!("/root"));
+ let worktrees_directory = PathBuf::from(path!("/root"));
+ let worktree_1_directory = worktrees_directory.join("feature-branch");
cx.update(|cx| {
repository.update(cx, |repository, _| {
repository.create_worktree(
"feature-branch".to_string(),
- worktree_directory.clone(),
+ worktree_1_directory.clone(),
Some("abc123".to_string()),
)
})
@@ -1244,15 +1286,19 @@ mod git_worktrees {
.unwrap();
assert_eq!(worktrees.len(), 2);
assert_eq!(worktrees[0].path, PathBuf::from(path!("/root")));
- assert_eq!(worktrees[1].path, worktree_directory.join("feature-branch"));
- assert_eq!(worktrees[1].ref_name.as_ref(), "refs/heads/feature-branch");
+ assert_eq!(worktrees[1].path, worktree_1_directory);
+ assert_eq!(
+ worktrees[1].ref_name,
+ Some("refs/heads/feature-branch".into())
+ );
assert_eq!(worktrees[1].sha.as_ref(), "abc123");
+ let worktree_2_directory = worktrees_directory.join("bugfix-branch");
cx.update(|cx| {
repository.update(cx, |repository, _| {
repository.create_worktree(
"bugfix-branch".to_string(),
- worktree_directory.clone(),
+ worktree_2_directory.clone(),
None,
)
})
@@ -1271,24 +1317,18 @@ mod git_worktrees {
.unwrap();
assert_eq!(worktrees.len(), 3);
- let feature_worktree = worktrees
+ let worktree_1 = worktrees
.iter()
- .find(|worktree| worktree.ref_name.as_ref() == "refs/heads/feature-branch")
+ .find(|worktree| worktree.ref_name == Some("refs/heads/feature-branch".into()))
.expect("should find feature-branch worktree");
- assert_eq!(
- feature_worktree.path,
- worktree_directory.join("feature-branch")
- );
+ assert_eq!(worktree_1.path, worktree_1_directory);
- let bugfix_worktree = worktrees
+ let worktree_2 = worktrees
.iter()
- .find(|worktree| worktree.ref_name.as_ref() == "refs/heads/bugfix-branch")
+ .find(|worktree| worktree.ref_name == Some("refs/heads/bugfix-branch".into()))
.expect("should find bugfix-branch worktree");
- assert_eq!(
- bugfix_worktree.path,
- worktree_directory.join("bugfix-branch")
- );
- assert_eq!(bugfix_worktree.sha.as_ref(), "fake-sha");
+ assert_eq!(worktree_2.path, worktree_2_directory);
+ assert_eq!(worktree_2.sha.as_ref(), "fake-sha");
}
use crate::Project;
@@ -1498,3 +1538,113 @@ mod trust_tests {
});
}
}
+
+mod resolve_worktree_tests {
+ use fs::FakeFs;
+ use gpui::TestAppContext;
+ use project::{git_store::resolve_git_worktree_to_main_repo, linked_worktree_short_name};
+ use serde_json::json;
+ use std::path::{Path, PathBuf};
+
+ #[gpui::test]
+ async fn test_resolve_git_worktree_to_main_repo(cx: &mut TestAppContext) {
+ let fs = FakeFs::new(cx.executor());
+ // Set up a main repo with a worktree entry
+ fs.insert_tree(
+ "/main-repo",
+ json!({
+ ".git": {
+ "worktrees": {
+ "feature": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/feature"
+ }
+ }
+ },
+ "src": { "main.rs": "" }
+ }),
+ )
+ .await;
+ // Set up a worktree checkout pointing back to the main repo
+ fs.insert_tree(
+ "/worktree-checkout",
+ json!({
+ ".git": "gitdir: /main-repo/.git/worktrees/feature",
+ "src": { "main.rs": "" }
+ }),
+ )
+ .await;
+
+ let result =
+ resolve_git_worktree_to_main_repo(fs.as_ref(), Path::new("/worktree-checkout")).await;
+ assert_eq!(result, Some(PathBuf::from("/main-repo")));
+ }
+
+ #[gpui::test]
+ async fn test_resolve_git_worktree_normal_repo_returns_none(cx: &mut TestAppContext) {
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ "/repo",
+ json!({
+ ".git": {},
+ "src": { "main.rs": "" }
+ }),
+ )
+ .await;
+
+ let result = resolve_git_worktree_to_main_repo(fs.as_ref(), Path::new("/repo")).await;
+ assert_eq!(result, None);
+ }
+
+ #[gpui::test]
+ async fn test_resolve_git_worktree_no_git_returns_none(cx: &mut TestAppContext) {
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ "/plain",
+ json!({
+ "src": { "main.rs": "" }
+ }),
+ )
+ .await;
+
+ let result = resolve_git_worktree_to_main_repo(fs.as_ref(), Path::new("/plain")).await;
+ assert_eq!(result, None);
+ }
+
+ #[gpui::test]
+ async fn test_resolve_git_worktree_nonexistent_returns_none(cx: &mut TestAppContext) {
+ let fs = FakeFs::new(cx.executor());
+
+ let result =
+ resolve_git_worktree_to_main_repo(fs.as_ref(), Path::new("/does-not-exist")).await;
+ assert_eq!(result, None);
+ }
+
+ #[test]
+ fn test_linked_worktree_short_name() {
+ let examples = [
+ (
+ "/home/bob/zed",
+ "/home/bob/worktrees/olivetti/zed",
+ Some("olivetti".into()),
+ ),
+ ("/home/bob/zed", "/home/bob/zed2", Some("zed2".into())),
+ (
+ "/home/bob/zed",
+ "/home/bob/worktrees/zed/selectric",
+ Some("selectric".into()),
+ ),
+ ("/home/bob/zed", "/home/bob/zed", None),
+ ];
+ for (main_worktree_path, linked_worktree_path, expected) in examples {
+ let short_name = linked_worktree_short_name(
+ Path::new(main_worktree_path),
+ Path::new(linked_worktree_path),
+ );
+ assert_eq!(
+ short_name, expected,
+ "short name for {linked_worktree_path:?}, linked worktree of {main_worktree_path:?}, should be {expected:?}"
+ );
+ }
+ }
+}
@@ -126,6 +126,63 @@ async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
task.await;
}
+#[gpui::test]
+async fn test_default_session_work_dirs_prefers_directory_worktrees_over_single_file_parents(
+ cx: &mut gpui::TestAppContext,
+) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ path!("/root"),
+ json!({
+ "dir-project": {
+ "src": {
+ "main.rs": "fn main() {}"
+ }
+ },
+ "single-file.rs": "fn helper() {}"
+ }),
+ )
+ .await;
+
+ let project = Project::test(
+ fs,
+ [
+ Path::new(path!("/root/single-file.rs")),
+ Path::new(path!("/root/dir-project")),
+ ],
+ cx,
+ )
+ .await;
+
+ let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
+ let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();
+
+ assert_eq!(
+ ordered_paths,
+ vec![
+ PathBuf::from(path!("/root/dir-project")),
+ PathBuf::from(path!("/root")),
+ ]
+ );
+}
+
+#[gpui::test]
+async fn test_default_session_work_dirs_falls_back_to_home_for_empty_project(
+ cx: &mut gpui::TestAppContext,
+) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ let project = Project::test(fs, [], cx).await;
+
+ let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
+ let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();
+
+ assert_eq!(ordered_paths, vec![paths::home_dir().to_path_buf()]);
+}
+
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
// we assume that they are not supported out of the box.
@@ -7755,6 +7812,92 @@ async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
);
}
+#[gpui::test]
+async fn test_code_actions_without_requested_kinds_do_not_send_only_filter(
+ cx: &mut gpui::TestAppContext,
+) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ path!("/dir"),
+ json!({
+ "a.ts": "a",
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
+
+ let language_registry = project.read_with(cx, |project, _| project.languages().clone());
+ language_registry.add(typescript_lang());
+ let mut fake_language_servers = language_registry.register_fake_lsp(
+ "TypeScript",
+ FakeLspAdapter {
+ capabilities: lsp::ServerCapabilities {
+ code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
+ lsp::CodeActionOptions {
+ code_action_kinds: Some(vec![
+ CodeActionKind::SOURCE_ORGANIZE_IMPORTS,
+ "source.doc".into(),
+ ]),
+ ..lsp::CodeActionOptions::default()
+ },
+ )),
+ ..lsp::ServerCapabilities::default()
+ },
+ ..FakeLspAdapter::default()
+ },
+ );
+
+ let (buffer, _handle) = project
+ .update(cx, |p, cx| {
+ p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
+ })
+ .await
+ .unwrap();
+ cx.executor().run_until_parked();
+
+ let fake_server = fake_language_servers
+ .next()
+ .await
+ .expect("failed to get the language server");
+
+ let mut request_handled = fake_server.set_request_handler::<
+ lsp::request::CodeActionRequest,
+ _,
+ _,
+ >(move |params, _| async move {
+ assert_eq!(
+ params.context.only, None,
+ "Code action requests without explicit kind filters should not send `context.only`"
+ );
+ Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
+ lsp::CodeAction {
+ title: "Add test".to_string(),
+ kind: Some("source.addTest".into()),
+ ..lsp::CodeAction::default()
+ },
+ )]))
+ });
+
+ let code_actions_task = project.update(cx, |project, cx| {
+ project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
+ });
+
+ let () = request_handled
+ .next()
+ .await
+ .expect("The code action request should have been triggered");
+
+ let code_actions = code_actions_task.await.unwrap().unwrap();
+ assert_eq!(code_actions.len(), 1);
+ assert_eq!(
+ code_actions[0].lsp_action.action_kind(),
+ Some("source.addTest".into())
+ );
+}
+
#[gpui::test]
async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
init_test(cx);
@@ -38,7 +38,7 @@ fn test_add() {
// add item when it equals to current item if it's not the last one
search_history.add(&mut cursor, "php".to_string());
- search_history.previous(&mut cursor);
+ search_history.previous(&mut cursor, "");
assert_eq!(search_history.current(&cursor), Some("rustlang"));
search_history.add(&mut cursor, "rustlang".to_string());
assert_eq!(search_history.len(), 3, "Should add item");
@@ -71,13 +71,13 @@ fn test_next_and_previous() {
assert_eq!(search_history.current(&cursor), Some("TypeScript"));
- assert_eq!(search_history.previous(&mut cursor), Some("JavaScript"));
+ assert_eq!(search_history.previous(&mut cursor, ""), Some("JavaScript"));
assert_eq!(search_history.current(&cursor), Some("JavaScript"));
- assert_eq!(search_history.previous(&mut cursor), Some("Rust"));
+ assert_eq!(search_history.previous(&mut cursor, ""), Some("Rust"));
assert_eq!(search_history.current(&cursor), Some("Rust"));
- assert_eq!(search_history.previous(&mut cursor), None);
+ assert_eq!(search_history.previous(&mut cursor, ""), None);
assert_eq!(search_history.current(&cursor), Some("Rust"));
assert_eq!(search_history.next(&mut cursor), Some("JavaScript"));
@@ -103,14 +103,14 @@ fn test_reset_selection() {
cursor.reset();
assert_eq!(search_history.current(&cursor), None);
assert_eq!(
- search_history.previous(&mut cursor),
+ search_history.previous(&mut cursor, ""),
Some("TypeScript"),
"Should start from the end after reset on previous item query"
);
- search_history.previous(&mut cursor);
+ search_history.previous(&mut cursor, "");
assert_eq!(search_history.current(&cursor), Some("JavaScript"));
- search_history.previous(&mut cursor);
+ search_history.previous(&mut cursor, "");
assert_eq!(search_history.current(&cursor), Some("Rust"));
cursor.reset();
@@ -134,8 +134,11 @@ fn test_multiple_cursors() {
assert_eq!(search_history.current(&cursor1), Some("TypeScript"));
assert_eq!(search_history.current(&cursor2), Some("C++"));
- assert_eq!(search_history.previous(&mut cursor1), Some("JavaScript"));
- assert_eq!(search_history.previous(&mut cursor2), Some("Java"));
+ assert_eq!(
+ search_history.previous(&mut cursor1, ""),
+ Some("JavaScript")
+ );
+ assert_eq!(search_history.previous(&mut cursor2, ""), Some("Java"));
assert_eq!(search_history.next(&mut cursor1), Some("TypeScript"));
assert_eq!(search_history.next(&mut cursor1), Some("Python"));
@@ -47,6 +47,7 @@ language.workspace = true
zed_actions.workspace = true
telemetry.workspace = true
notifications.workspace = true
+feature_flags.workspace = true
[dev-dependencies]
client = { workspace = true, features = ["test-support"] }
@@ -54,6 +55,7 @@ criterion.workspace = true
editor = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
language = { workspace = true, features = ["test-support"] }
+remote_connection = { workspace = true, features = ["test-support"] }
serde_json.workspace = true
tempfile.workspace = true
workspace = { workspace = true, features = ["test-support"] }
@@ -1,11 +1,12 @@
pub mod project_panel_settings;
+mod undo;
mod utils;
use anyhow::{Context as _, Result};
use client::{ErrorCode, ErrorExt};
use collections::{BTreeSet, HashMap, hash_map};
use command_palette_hooks::CommandPaletteFilter;
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
use editor::{
Editor, EditorEvent, MultiBufferOffset,
items::{
@@ -13,20 +14,21 @@ use editor::{
entry_diagnostic_aware_icon_name_and_color, entry_git_aware_label_color,
},
};
+use feature_flags::{FeatureFlagAppExt, ProjectPanelUndoRedoFeatureFlag};
use file_icons::FileIcons;
use git;
use git::status::GitSummary;
use git_ui;
use git_ui::file_diff_view::FileDiffView;
use gpui::{
- Action, AnyElement, App, AsyncWindowContext, Bounds, ClipboardItem, Context, CursorStyle,
- DismissEvent, Div, DragMoveEvent, Entity, EventEmitter, ExternalPaths, FocusHandle, Focusable,
- FontWeight, Hsla, InteractiveElement, KeyContext, ListHorizontalSizingBehavior,
- ListSizingBehavior, Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent,
- ParentElement, PathPromptOptions, Pixels, Point, PromptLevel, Render, ScrollStrategy, Stateful,
- Styled, Subscription, Task, UniformListScrollHandle, WeakEntity, Window, actions, anchored,
- deferred, div, hsla, linear_color_stop, linear_gradient, point, px, size, transparent_white,
- uniform_list,
+ Action, AnyElement, App, AsyncWindowContext, Bounds, ClipboardEntry as GpuiClipboardEntry,
+ ClipboardItem, Context, CursorStyle, DismissEvent, Div, DragMoveEvent, Entity, EventEmitter,
+ ExternalPaths, FocusHandle, Focusable, FontWeight, Hsla, InteractiveElement, KeyContext,
+ ListHorizontalSizingBehavior, ListSizingBehavior, Modifiers, ModifiersChangedEvent,
+ MouseButton, MouseDownEvent, ParentElement, PathPromptOptions, Pixels, Point, PromptLevel,
+ Render, ScrollStrategy, Stateful, Styled, Subscription, Task, UniformListScrollHandle,
+ WeakEntity, Window, actions, anchored, deferred, div, hsla, linear_color_stop, linear_gradient,
+ point, px, size, transparent_white, uniform_list,
};
use language::DiagnosticSeverity;
use menu::{Confirm, SelectFirst, SelectLast, SelectNext, SelectPrevious};
@@ -81,6 +83,8 @@ use zed_actions::{
workspace::OpenWithSystem,
};
+use crate::undo::{ProjectPanelOperation, UndoManager};
+
const PROJECT_PANEL_KEY: &str = "ProjectPanel";
const NEW_ENTRY_ID: ProjectEntryId = ProjectEntryId::MAX;
@@ -157,6 +161,7 @@ pub struct ProjectPanel {
sticky_items_count: usize,
last_reported_update: Instant,
update_visible_entries_task: UpdateVisibleEntriesTask,
+ undo_manager: UndoManager,
state: State,
}
@@ -394,6 +399,8 @@ actions!(
SelectPrevDirectory,
/// Opens a diff view to compare two marked files.
CompareMarkedFiles,
+ /// Undoes the last file operation.
+ Undo,
]
);
@@ -893,6 +900,7 @@ impl ProjectPanel {
unfolded_dir_ids: Default::default(),
},
update_visible_entries_task: Default::default(),
+ undo_manager: UndoManager::new(workspace.weak_handle()),
};
this.update_visible_entries(None, false, false, window, cx);
@@ -999,16 +1007,18 @@ impl ProjectPanel {
.ok()
.flatten()
{
- Some(serialization_key) => cx
- .background_spawn(async move { KEY_VALUE_STORE.read_kvp(&serialization_key) })
- .await
- .context("loading project panel")
- .log_err()
- .flatten()
- .map(|panel| serde_json::from_str::<SerializedProjectPanel>(&panel))
- .transpose()
- .log_err()
- .flatten(),
+ Some(serialization_key) => {
+ let kvp = cx.update(|_, cx| KeyValueStore::global(cx))?;
+ cx.background_spawn(async move { kvp.read_kvp(&serialization_key) })
+ .await
+ .context("loading project panel")
+ .log_err()
+ .flatten()
+ .map(|panel| serde_json::from_str::<SerializedProjectPanel>(&panel))
+ .transpose()
+ .log_err()
+ .flatten()
+ }
None => None,
};
@@ -1114,14 +1124,14 @@ impl ProjectPanel {
return;
};
let width = self.width;
+ let kvp = KeyValueStore::global(cx);
self.pending_serialization = cx.background_spawn(
async move {
- KEY_VALUE_STORE
- .write_kvp(
- serialization_key,
- serde_json::to_string(&SerializedProjectPanel { width })?,
- )
- .await?;
+ kvp.write_kvp(
+ serialization_key,
+ serde_json::to_string(&SerializedProjectPanel { width })?,
+ )
+ .await?;
anyhow::Ok(())
}
.log_err(),
@@ -1185,8 +1195,9 @@ impl ProjectPanel {
.is_some()
};
+ let has_pasteable_content = self.has_pasteable_content(cx);
let entity = cx.entity();
- let context_menu = ContextMenu::build(window, cx, |menu, _, _| {
+ let context_menu = ContextMenu::build(window, cx, |menu, _, cx| {
menu.context(self.focus_handle.clone()).map(|menu| {
if is_read_only {
menu.when(is_dir, |menu| {
@@ -1198,13 +1209,7 @@ impl ProjectPanel {
.separator()
.when(is_local, |menu| {
menu.action(
- if cfg!(target_os = "macos") && !is_remote {
- "Reveal in Finder"
- } else if cfg!(target_os = "windows") && !is_remote {
- "Reveal in File Explorer"
- } else {
- "Reveal in File Manager"
- },
+ ui::utils::reveal_in_file_manager_label(is_remote),
Box::new(RevealInFileManager),
)
})
@@ -1231,11 +1236,14 @@ impl ProjectPanel {
.action("Copy", Box::new(Copy))
.action("Duplicate", Box::new(Duplicate))
                    // TODO: Paste should always be visible, but disabled when clipboard is empty
- .action_disabled_when(
- self.clipboard.as_ref().is_none(),
- "Paste",
- Box::new(Paste),
- )
+ .action_disabled_when(!has_pasteable_content, "Paste", Box::new(Paste))
+ .when(cx.has_flag::<ProjectPanelUndoRedoFeatureFlag>(), |menu| {
+ menu.action_disabled_when(
+ !self.undo_manager.can_undo(),
+ "Undo",
+ Box::new(Undo),
+ )
+ })
.when(is_remote, |menu| {
menu.separator()
.action("Download...", Box::new(DownloadFromRemote))
@@ -1881,6 +1889,8 @@ impl ProjectPanel {
let edit_task;
let edited_entry_id;
+ let edited_entry;
+ let new_project_path: ProjectPath;
if is_new_entry {
self.selection = Some(SelectedEntry {
worktree_id,
@@ -1891,12 +1901,14 @@ impl ProjectPanel {
return None;
}
+ edited_entry = None;
edited_entry_id = NEW_ENTRY_ID;
+ new_project_path = (worktree_id, new_path).into();
edit_task = self.project.update(cx, |project, cx| {
- project.create_entry((worktree_id, new_path), is_dir, cx)
+ project.create_entry(new_project_path.clone(), is_dir, cx)
});
} else {
- let new_path = if let Some(parent) = entry.path.clone().parent() {
+ let new_path = if let Some(parent) = entry.path.parent() {
parent.join(&filename)
} else {
filename.clone()
@@ -1908,9 +1920,11 @@ impl ProjectPanel {
return None;
}
edited_entry_id = entry.id;
+ edited_entry = Some(entry);
+ new_project_path = (worktree_id, new_path).into();
edit_task = self.project.update(cx, |project, cx| {
- project.rename_entry(entry.id, (worktree_id, new_path).into(), cx)
- });
+ project.rename_entry(edited_entry_id, new_project_path.clone(), cx)
+ })
};
if refocus {
@@ -1923,6 +1937,22 @@ impl ProjectPanel {
let new_entry = edit_task.await;
project_panel.update(cx, |project_panel, cx| {
project_panel.state.edit_state = None;
+
+ // Record the operation if the edit was applied
+ if new_entry.is_ok() {
+ let operation = if let Some(old_entry) = edited_entry {
+ ProjectPanelOperation::Rename {
+ old_path: (worktree_id, old_entry.path).into(),
+ new_path: new_project_path,
+ }
+ } else {
+ ProjectPanelOperation::Create {
+ project_path: new_project_path,
+ }
+ };
+ project_panel.undo_manager.record(operation);
+ }
+
cx.notify();
})?;
@@ -2173,6 +2203,11 @@ impl ProjectPanel {
}
}
+ pub fn undo(&mut self, _: &Undo, _window: &mut Window, cx: &mut Context<Self>) {
+ self.undo_manager.undo(cx);
+ cx.notify();
+ }
+
fn rename_impl(
&mut self,
selection: Option<Range<usize>>,
@@ -2360,6 +2395,7 @@ impl ProjectPanel {
let project_path = project.path_for_entry(selection.entry_id, cx)?;
dirty_buffers +=
project.dirty_buffers(cx).any(|path| path == project_path) as usize;
+
Some((
selection.entry_id,
project_path.path.file_name()?.to_string(),
@@ -2998,6 +3034,7 @@ impl ProjectPanel {
fn cut(&mut self, _: &Cut, _: &mut Window, cx: &mut Context<Self>) {
let entries = self.disjoint_effective_entries(cx);
if !entries.is_empty() {
+ self.write_entries_to_system_clipboard(&entries, cx);
self.clipboard = Some(ClipboardEntry::Cut(entries));
cx.notify();
}
@@ -3006,6 +3043,7 @@ impl ProjectPanel {
fn copy(&mut self, _: &Copy, _: &mut Window, cx: &mut Context<Self>) {
let entries = self.disjoint_effective_entries(cx);
if !entries.is_empty() {
+ self.write_entries_to_system_clipboard(&entries, cx);
self.clipboard = Some(ClipboardEntry::Copied(entries));
cx.notify();
}
@@ -3022,16 +3060,25 @@ impl ProjectPanel {
if target_entry.is_file() || (target_entry.is_dir() && target_entry.id == source.entry_id) {
new_path.pop();
}
- let clipboard_entry_file_name = self
+
+ let source_worktree = self
.project
.read(cx)
- .path_for_entry(source.entry_id, cx)?
- .path
- .file_name()?
- .to_string();
+ .worktree_for_entry(source.entry_id, cx)?;
+ let source_entry = source_worktree.read(cx).entry_for_id(source.entry_id)?;
+
+ let clipboard_entry_file_name = source_entry.path.file_name()?.to_string();
new_path.push(RelPath::unix(&clipboard_entry_file_name).unwrap());
- let extension = new_path.extension().map(|s| s.to_string());
- let file_name_without_extension = new_path.file_stem()?.to_string();
+
+ let (extension, file_name_without_extension) = if source_entry.is_file() {
+ (
+ new_path.extension().map(|s| s.to_string()),
+ new_path.file_stem()?.to_string(),
+ )
+ } else {
+ (None, clipboard_entry_file_name.clone())
+ };
+
let file_name_len = file_name_without_extension.len();
let mut disambiguation_range = None;
let mut ix = 0;
@@ -3067,6 +3114,17 @@ impl ProjectPanel {
}
fn paste(&mut self, _: &Paste, window: &mut Window, cx: &mut Context<Self>) {
+ if let Some(external_paths) = self.external_paths_from_system_clipboard(cx) {
+ let target_entry_id = self
+ .selection
+ .map(|s| s.entry_id)
+ .or(self.state.last_worktree_root_id);
+ if let Some(entry_id) = target_entry_id {
+ self.drop_external_files(external_paths.paths(), entry_id, window, cx);
+ }
+ return;
+ }
+
maybe!({
let (worktree, entry) = self.selected_entry_handle(cx)?;
let entry = entry.clone();
@@ -3077,8 +3135,15 @@ impl ProjectPanel {
.filter(|clipboard| !clipboard.items().is_empty())?;
enum PasteTask {
- Rename(Task<Result<CreatedEntry>>),
- Copy(Task<Result<Option<Entry>>>),
+ Rename {
+ task: Task<Result<CreatedEntry>>,
+ old_path: ProjectPath,
+ new_path: ProjectPath,
+ },
+ Copy {
+ task: Task<Result<Option<Entry>>>,
+ destination: ProjectPath,
+ },
}
let mut paste_tasks = Vec::new();
@@ -3088,16 +3153,22 @@ impl ProjectPanel {
let (new_path, new_disambiguation_range) =
self.create_paste_path(clipboard_entry, self.selected_sub_entry(cx)?, cx)?;
let clip_entry_id = clipboard_entry.entry_id;
+ let destination: ProjectPath = (worktree_id, new_path).into();
let task = if clipboard_entries.is_cut() {
+ let old_path = self.project.read(cx).path_for_entry(clip_entry_id, cx)?;
let task = self.project.update(cx, |project, cx| {
- project.rename_entry(clip_entry_id, (worktree_id, new_path).into(), cx)
+ project.rename_entry(clip_entry_id, destination.clone(), cx)
});
- PasteTask::Rename(task)
+ PasteTask::Rename {
+ task,
+ old_path,
+ new_path: destination,
+ }
} else {
let task = self.project.update(cx, |project, cx| {
- project.copy_entry(clip_entry_id, (worktree_id, new_path).into(), cx)
+ project.copy_entry(clip_entry_id, destination.clone(), cx)
});
- PasteTask::Copy(task)
+ PasteTask::Copy { task, destination }
};
paste_tasks.push(task);
disambiguation_range = new_disambiguation_range.or(disambiguation_range);
@@ -3108,26 +3179,44 @@ impl ProjectPanel {
cx.spawn_in(window, async move |project_panel, mut cx| {
let mut last_succeed = None;
+ let mut operations = Vec::new();
+
for task in paste_tasks {
match task {
- PasteTask::Rename(task) => {
+ PasteTask::Rename {
+ task,
+ old_path,
+ new_path,
+ } => {
if let Some(CreatedEntry::Included(entry)) = task
.await
.notify_workspace_async_err(workspace.clone(), &mut cx)
{
+ operations
+ .push(ProjectPanelOperation::Rename { old_path, new_path });
last_succeed = Some(entry);
}
}
- PasteTask::Copy(task) => {
+ PasteTask::Copy { task, destination } => {
if let Some(Some(entry)) = task
.await
.notify_workspace_async_err(workspace.clone(), &mut cx)
{
+ operations.push(ProjectPanelOperation::Create {
+ project_path: destination,
+ });
last_succeed = Some(entry);
}
}
}
}
+
+ project_panel
+ .update(cx, |this, _| {
+ this.undo_manager.record_batch(operations);
+ })
+ .ok();
+
// update selection
if let Some(entry) = last_succeed {
project_panel
@@ -3785,6 +3874,51 @@ impl ProjectPanel {
Some(worktree.absolutize(&root_entry.path))
}
+ fn write_entries_to_system_clipboard(&self, entries: &BTreeSet<SelectedEntry>, cx: &mut App) {
+ let project = self.project.read(cx);
+ let paths: Vec<String> = entries
+ .iter()
+ .filter_map(|entry| {
+ let worktree = project.worktree_for_id(entry.worktree_id, cx)?;
+ let worktree = worktree.read(cx);
+ let worktree_entry = worktree.entry_for_id(entry.entry_id)?;
+ Some(
+ worktree
+ .abs_path()
+ .join(worktree_entry.path.as_std_path())
+ .to_string_lossy()
+ .to_string(),
+ )
+ })
+ .collect();
+ if !paths.is_empty() {
+ cx.write_to_clipboard(ClipboardItem::new_string(paths.join("\n")));
+ }
+ }
+
+ fn external_paths_from_system_clipboard(&self, cx: &App) -> Option<ExternalPaths> {
+ let clipboard_item = cx.read_from_clipboard()?;
+ for entry in clipboard_item.entries() {
+ if let GpuiClipboardEntry::ExternalPaths(paths) = entry {
+ if !paths.paths().is_empty() {
+ return Some(paths.clone());
+ }
+ }
+ }
+ None
+ }
+
+ fn has_pasteable_content(&self, cx: &App) -> bool {
+ if self
+ .clipboard
+ .as_ref()
+ .is_some_and(|c| !c.items().is_empty())
+ {
+ return true;
+ }
+ self.external_paths_from_system_clipboard(cx).is_some()
+ }
+
fn selected_entry_handle<'a>(
&self,
cx: &'a App,
@@ -4271,19 +4405,35 @@ impl ProjectPanel {
return Ok(());
}
- let task = worktree.update(cx, |worktree, cx| {
- worktree.copy_external_entries(target_directory, paths, fs, cx)
+ let (worktree_id, task) = worktree.update(cx, |worktree, cx| {
+ (
+ worktree.id(),
+ worktree.copy_external_entries(target_directory, paths, fs, cx),
+ )
});
let opened_entries: Vec<_> = task
.await
.with_context(|| "failed to copy external paths")?;
- this.update(cx, |this, cx| {
+ this.update_in(cx, |this, window, cx| {
+ let mut did_open = false;
if open_file_after_drop && !opened_entries.is_empty() {
let settings = ProjectPanelSettings::get_global(cx);
if settings.auto_open.should_open_on_drop() {
this.open_entry(opened_entries[0], true, false, cx);
+ did_open = true;
+ }
+ }
+
+ if !did_open {
+ let new_selection = opened_entries
+ .last()
+ .map(|&entry_id| (worktree_id, entry_id));
+ for &entry_id in &opened_entries {
+ this.expand_entry(worktree_id, entry_id, cx);
}
+ this.marked_entries.clear();
+ this.update_visible_entries(new_selection, false, false, window, cx);
}
})
}
@@ -4363,9 +4513,13 @@ impl ProjectPanel {
cx.spawn_in(window, async move |project_panel, cx| {
let mut last_succeed = None;
+ let mut operations = Vec::new();
for task in copy_tasks.into_iter() {
if let Some(Some(entry)) = task.await.log_err() {
last_succeed = Some(entry.id);
+ operations.push(ProjectPanelOperation::Create {
+ project_path: (worktree_id, entry.path).into(),
+ });
}
}
// update selection
@@ -4377,6 +4531,8 @@ impl ProjectPanel {
entry_id,
});
+ project_panel.undo_manager.record_batch(operations);
+
// if only one entry was dragged and it was disambiguated, open the rename editor
if item_count == 1 && disambiguation_range.is_some() {
project_panel.rename_impl(disambiguation_range, window, cx);
@@ -4426,6 +4582,23 @@ impl ProjectPanel {
(info, folded_entries)
};
+ // Capture old paths before moving so we can record undo operations.
+ let old_paths: HashMap<ProjectEntryId, ProjectPath> = {
+ let project = self.project.read(cx);
+ entries
+ .iter()
+ .filter_map(|entry| {
+ let path = project.path_for_entry(entry.entry_id, cx)?;
+ Some((entry.entry_id, path))
+ })
+ .collect()
+ };
+ let destination_worktree_id = self
+ .project
+ .read(cx)
+ .worktree_for_entry(target_entry_id, cx)
+ .map(|wt| wt.read(cx).id());
+
// Collect move tasks paired with their source entry ID so we can correlate
// results with folded selections that need refreshing.
let mut move_tasks: Vec<(ProjectEntryId, Task<Result<CreatedEntry>>)> = Vec::new();
@@ -4441,22 +4614,48 @@ impl ProjectPanel {
let workspace = self.workspace.clone();
if folded_selection_info.is_empty() {
- for (_, task) in move_tasks {
- let workspace = workspace.clone();
- cx.spawn_in(window, async move |_, mut cx| {
- task.await.notify_workspace_async_err(workspace, &mut cx);
- })
- .detach();
- }
+ cx.spawn_in(window, async move |project_panel, mut cx| {
+ let mut operations = Vec::new();
+ for (entry_id, task) in move_tasks {
+ if let Some(CreatedEntry::Included(new_entry)) = task
+ .await
+ .notify_workspace_async_err(workspace.clone(), &mut cx)
+ {
+ if let (Some(old_path), Some(worktree_id)) =
+ (old_paths.get(&entry_id), destination_worktree_id)
+ {
+ operations.push(ProjectPanelOperation::Rename {
+ old_path: old_path.clone(),
+ new_path: (worktree_id, new_entry.path).into(),
+ });
+ }
+ }
+ }
+ project_panel
+ .update(cx, |this, _| {
+ this.undo_manager.record_batch(operations);
+ })
+ .ok();
+ })
+ .detach();
} else {
cx.spawn_in(window, async move |project_panel, mut cx| {
// Await all move tasks and collect successful results
let mut move_results: Vec<(ProjectEntryId, Entry)> = Vec::new();
+ let mut operations = Vec::new();
for (entry_id, task) in move_tasks {
if let Some(CreatedEntry::Included(new_entry)) = task
.await
.notify_workspace_async_err(workspace.clone(), &mut cx)
{
+ if let (Some(old_path), Some(worktree_id)) =
+ (old_paths.get(&entry_id), destination_worktree_id)
+ {
+ operations.push(ProjectPanelOperation::Rename {
+ old_path: old_path.clone(),
+ new_path: (worktree_id, new_entry.path.clone()).into(),
+ });
+ }
move_results.push((entry_id, new_entry));
}
}
@@ -4465,6 +4664,12 @@ impl ProjectPanel {
return;
}
+ project_panel
+ .update(cx, |this, _| {
+ this.undo_manager.record_batch(operations);
+ })
+ .ok();
+
// For folded selections, we need to refresh the leaf paths (with suffixes)
// because they may not be indexed yet after the parent directory was moved.
// First collect the paths to refresh, then refresh them.
@@ -6477,6 +6682,9 @@ impl Render for ProjectPanel {
.on_action(cx.listener(Self::fold_directory))
.on_action(cx.listener(Self::remove_from_project))
.on_action(cx.listener(Self::compare_marked_files))
+ .when(cx.has_flag::<ProjectPanelUndoRedoFeatureFlag>(), |el| {
+ el.on_action(cx.listener(Self::undo))
+ })
.when(!project.is_read_only(cx), |el| {
el.on_action(cx.listener(Self::new_file))
.on_action(cx.listener(Self::new_directory))
@@ -4,7 +4,7 @@ use editor::MultiBufferOffset;
use gpui::{Empty, Entity, TestAppContext, VisualTestContext};
use menu::Cancel;
use pretty_assertions::assert_eq;
-use project::FakeFs;
+use project::{FakeFs, ProjectPath};
use serde_json::json;
use settings::{ProjectPanelAutoOpenSettings, SettingsStore};
use std::path::{Path, PathBuf};
@@ -1635,7 +1635,10 @@ async fn test_copy_paste_directory(cx: &mut gpui::TestAppContext) {
"four.txt": "",
}
},
- "b": {}
+ "b": {},
+ "d.1.20": {
+ "default.conf": "",
+ }
}),
)
.await;
@@ -1688,6 +1691,7 @@ async fn test_copy_paste_directory(cx: &mut gpui::TestAppContext) {
" three.txt",
" one.txt",
" two.txt",
+ " > d.1.20",
]
);
@@ -1709,7 +1713,8 @@ async fn test_copy_paste_directory(cx: &mut gpui::TestAppContext) {
" four.txt",
" three.txt",
" one.txt",
- " two.txt"
+ " two.txt",
+ " > d.1.20",
]
);
@@ -1732,7 +1737,8 @@ async fn test_copy_paste_directory(cx: &mut gpui::TestAppContext) {
" four.txt",
" three.txt",
" one.txt",
- " two.txt"
+ " two.txt",
+ " > d.1.20",
]
);
@@ -1760,8 +1766,40 @@ async fn test_copy_paste_directory(cx: &mut gpui::TestAppContext) {
" > inner_dir",
" one.txt",
" two.txt",
+ " > d.1.20",
]
);
+
+ select_path(&panel, "root/d.1.20", cx);
+ panel.update_in(cx, |panel, window, cx| {
+ panel.copy(&Default::default(), window, cx);
+ panel.paste(&Default::default(), window, cx);
+ });
+ cx.executor().run_until_parked();
+ assert_eq!(
+ visible_entries_as_strings(&panel, 0..50, cx),
+ &[
+ //
+ "v root",
+ " > a",
+ " v b",
+ " v a",
+ " v inner_dir",
+ " four.txt",
+ " three.txt",
+ " one.txt",
+ " two.txt",
+ " v c",
+ " > a",
+ " > inner_dir",
+ " one.txt",
+ " two.txt",
+ " v d.1.20",
+ " default.conf",
+ " > [EDITOR: 'd.1.20 copy'] <== selected",
+ ],
+ "Dotted directory names should not be split at the dot when disambiguating"
+ );
}
#[gpui::test]
@@ -1956,6 +1994,666 @@ async fn test_copy_paste_nested_and_root_entries(cx: &mut gpui::TestAppContext)
);
}
+// Undoing a rename restores the original file name (the recorded Rename is
+// reverted by renaming the entry back to its old path).
+#[gpui::test]
+async fn test_undo_rename(cx: &mut gpui::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        "/root",
+        json!({
+            "a.txt": "",
+            "b.txt": "",
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
+    let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+    let workspace = window
+        .read_with(cx, |mw, _| mw.workspace().clone())
+        .unwrap();
+    let cx = &mut VisualTestContext::from_window(window.into(), cx);
+    let panel = workspace.update_in(cx, ProjectPanel::new);
+    cx.run_until_parked();
+
+    select_path(&panel, "root/a.txt", cx);
+    panel.update_in(cx, |panel, window, cx| panel.rename(&Rename, window, cx));
+    cx.run_until_parked();
+
+    // Type the new name into the in-panel editor and confirm the edit.
+    let confirm = panel.update_in(cx, |panel, window, cx| {
+        panel
+            .filename_editor
+            .update(cx, |editor, cx| editor.set_text("renamed.txt", window, cx));
+        panel.confirm_edit(true, window, cx).unwrap()
+    });
+    confirm.await.unwrap();
+    cx.run_until_parked();
+
+    assert!(
+        find_project_entry(&panel, "root/renamed.txt", cx).is_some(),
+        "File should be renamed to renamed.txt"
+    );
+    assert_eq!(
+        find_project_entry(&panel, "root/a.txt", cx),
+        None,
+        "Original file should no longer exist"
+    );
+
+    panel.update_in(cx, |panel, window, cx| {
+        panel.undo(&Undo, window, cx);
+    });
+    cx.run_until_parked();
+
+    assert!(
+        find_project_entry(&panel, "root/a.txt", cx).is_some(),
+        "File should be restored to original name after undo"
+    );
+    assert_eq!(
+        find_project_entry(&panel, "root/renamed.txt", cx),
+        None,
+        "Renamed file should no longer exist after undo"
+    );
+}
+
+// Undoing a file creation removes the newly created file while leaving
+// pre-existing entries untouched.
+#[gpui::test]
+async fn test_undo_create_file(cx: &mut gpui::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        "/root",
+        json!({
+            "existing.txt": "",
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
+    let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+    let workspace = window
+        .read_with(cx, |mw, _| mw.workspace().clone())
+        .unwrap();
+    let cx = &mut VisualTestContext::from_window(window.into(), cx);
+    let panel = workspace.update_in(cx, ProjectPanel::new);
+    cx.run_until_parked();
+
+    select_path(&panel, "root", cx);
+    panel.update_in(cx, |panel, window, cx| panel.new_file(&NewFile, window, cx));
+    cx.run_until_parked();
+
+    let confirm = panel.update_in(cx, |panel, window, cx| {
+        panel
+            .filename_editor
+            .update(cx, |editor, cx| editor.set_text("new.txt", window, cx));
+        panel.confirm_edit(true, window, cx).unwrap()
+    });
+    confirm.await.unwrap();
+    cx.run_until_parked();
+
+    assert!(
+        find_project_entry(&panel, "root/new.txt", cx).is_some(),
+        "New file should exist"
+    );
+
+    panel.update_in(cx, |panel, window, cx| {
+        panel.undo(&Undo, window, cx);
+    });
+    cx.run_until_parked();
+
+    assert_eq!(
+        find_project_entry(&panel, "root/new.txt", cx),
+        None,
+        "New file should be removed after undo"
+    );
+    assert!(
+        find_project_entry(&panel, "root/existing.txt", cx).is_some(),
+        "Existing file should still be present"
+    );
+}
+
+// Undoing a directory creation removes the newly created (empty) directory.
+#[gpui::test]
+async fn test_undo_create_directory(cx: &mut gpui::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        "/root",
+        json!({
+            "existing.txt": "",
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
+    let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+    let workspace = window
+        .read_with(cx, |mw, _| mw.workspace().clone())
+        .unwrap();
+    let cx = &mut VisualTestContext::from_window(window.into(), cx);
+    let panel = workspace.update_in(cx, ProjectPanel::new);
+    cx.run_until_parked();
+
+    select_path(&panel, "root", cx);
+    panel.update_in(cx, |panel, window, cx| {
+        panel.new_directory(&NewDirectory, window, cx)
+    });
+    cx.run_until_parked();
+
+    let confirm = panel.update_in(cx, |panel, window, cx| {
+        panel
+            .filename_editor
+            .update(cx, |editor, cx| editor.set_text("new_dir", window, cx));
+        panel.confirm_edit(true, window, cx).unwrap()
+    });
+    confirm.await.unwrap();
+    cx.run_until_parked();
+
+    assert!(
+        find_project_entry(&panel, "root/new_dir", cx).is_some(),
+        "New directory should exist"
+    );
+
+    panel.update_in(cx, |panel, window, cx| {
+        panel.undo(&Undo, window, cx);
+    });
+    cx.run_until_parked();
+
+    assert_eq!(
+        find_project_entry(&panel, "root/new_dir", cx),
+        None,
+        "New directory should be removed after undo"
+    );
+}
+
+// A cut + paste (i.e. a move) is undone by moving the file back to its
+// original parent directory.
+#[gpui::test]
+async fn test_undo_cut_paste(cx: &mut gpui::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        "/root",
+        json!({
+            "src": {
+                "file.txt": "content",
+            },
+            "dst": {},
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
+    let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+    let workspace = window
+        .read_with(cx, |mw, _| mw.workspace().clone())
+        .unwrap();
+    let cx = &mut VisualTestContext::from_window(window.into(), cx);
+    let panel = workspace.update_in(cx, ProjectPanel::new);
+    cx.run_until_parked();
+
+    toggle_expand_dir(&panel, "root/src", cx);
+
+    select_path_with_mark(&panel, "root/src/file.txt", cx);
+    panel.update_in(cx, |panel, window, cx| {
+        panel.cut(&Default::default(), window, cx);
+    });
+
+    select_path(&panel, "root/dst", cx);
+    panel.update_in(cx, |panel, window, cx| {
+        panel.paste(&Default::default(), window, cx);
+    });
+    cx.run_until_parked();
+
+    assert!(
+        find_project_entry(&panel, "root/dst/file.txt", cx).is_some(),
+        "File should be moved to dst"
+    );
+    assert_eq!(
+        find_project_entry(&panel, "root/src/file.txt", cx),
+        None,
+        "File should no longer be in src"
+    );
+
+    panel.update_in(cx, |panel, window, cx| {
+        panel.undo(&Undo, window, cx);
+    });
+    cx.run_until_parked();
+
+    assert!(
+        find_project_entry(&panel, "root/src/file.txt", cx).is_some(),
+        "File should be back in src after undo"
+    );
+    assert_eq!(
+        find_project_entry(&panel, "root/dst/file.txt", cx),
+        None,
+        "File should no longer be in dst after undo"
+    );
+}
+
+// A drag-and-drop move of a single entry is recorded and can be undone.
+#[gpui::test]
+async fn test_undo_drag_single_entry(cx: &mut gpui::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        "/root",
+        json!({
+            "src": {
+                "main.rs": "",
+            },
+            "dst": {},
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
+    let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+    let workspace = window
+        .read_with(cx, |mw, _| mw.workspace().clone())
+        .unwrap();
+    let cx = &mut VisualTestContext::from_window(window.into(), cx);
+    let panel = workspace.update_in(cx, ProjectPanel::new);
+    cx.run_until_parked();
+
+    toggle_expand_dir(&panel, "root/src", cx);
+
+    // Clear any implicit marks so exactly one entry is dragged.
+    panel.update(cx, |panel, _| panel.marked_entries.clear());
+    select_path_with_mark(&panel, "root/src/main.rs", cx);
+    drag_selection_to(&panel, "root/dst", false, cx);
+
+    assert!(
+        find_project_entry(&panel, "root/dst/main.rs", cx).is_some(),
+        "File should be in dst after drag"
+    );
+    assert_eq!(
+        find_project_entry(&panel, "root/src/main.rs", cx),
+        None,
+        "File should no longer be in src after drag"
+    );
+
+    panel.update_in(cx, |panel, window, cx| {
+        panel.undo(&Undo, window, cx);
+    });
+    cx.run_until_parked();
+
+    assert!(
+        find_project_entry(&panel, "root/src/main.rs", cx).is_some(),
+        "File should be back in src after undo"
+    );
+    assert_eq!(
+        find_project_entry(&panel, "root/dst/main.rs", cx),
+        None,
+        "File should no longer be in dst after undo"
+    );
+}
+
+// Dragging several marked entries records one batch, so a single undo
+// reverts all of the moves at once.
+#[gpui::test]
+async fn test_undo_drag_multiple_entries(cx: &mut gpui::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        "/root",
+        json!({
+            "src": {
+                "alpha.txt": "",
+                "beta.txt": "",
+            },
+            "dst": {},
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
+    let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+    let workspace = window
+        .read_with(cx, |mw, _| mw.workspace().clone())
+        .unwrap();
+    let cx = &mut VisualTestContext::from_window(window.into(), cx);
+    let panel = workspace.update_in(cx, ProjectPanel::new);
+    cx.run_until_parked();
+
+    toggle_expand_dir(&panel, "root/src", cx);
+
+    panel.update(cx, |panel, _| panel.marked_entries.clear());
+    select_path_with_mark(&panel, "root/src/alpha.txt", cx);
+    select_path_with_mark(&panel, "root/src/beta.txt", cx);
+    drag_selection_to(&panel, "root/dst", false, cx);
+
+    assert!(
+        find_project_entry(&panel, "root/dst/alpha.txt", cx).is_some(),
+        "alpha.txt should be in dst after drag"
+    );
+    assert!(
+        find_project_entry(&panel, "root/dst/beta.txt", cx).is_some(),
+        "beta.txt should be in dst after drag"
+    );
+
+    // A single undo should revert the entire batch
+    panel.update_in(cx, |panel, window, cx| {
+        panel.undo(&Undo, window, cx);
+    });
+    cx.run_until_parked();
+
+    assert!(
+        find_project_entry(&panel, "root/src/alpha.txt", cx).is_some(),
+        "alpha.txt should be back in src after undo"
+    );
+    assert!(
+        find_project_entry(&panel, "root/src/beta.txt", cx).is_some(),
+        "beta.txt should be back in src after undo"
+    );
+    assert_eq!(
+        find_project_entry(&panel, "root/dst/alpha.txt", cx),
+        None,
+        "alpha.txt should no longer be in dst after undo"
+    );
+    assert_eq!(
+        find_project_entry(&panel, "root/dst/beta.txt", cx),
+        None,
+        "beta.txt should no longer be in dst after undo"
+    );
+}
+
+// The undo stack is LIFO: each undo reverts the most recent remaining
+// operation (first the file creation, then the earlier rename).
+#[gpui::test]
+async fn test_multiple_sequential_undos(cx: &mut gpui::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        "/root",
+        json!({
+            "a.txt": "",
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
+    let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+    let workspace = window
+        .read_with(cx, |mw, _| mw.workspace().clone())
+        .unwrap();
+    let cx = &mut VisualTestContext::from_window(window.into(), cx);
+    let panel = workspace.update_in(cx, ProjectPanel::new);
+    cx.run_until_parked();
+
+    // Operation 1: rename a.txt -> b.txt.
+    select_path(&panel, "root/a.txt", cx);
+    panel.update_in(cx, |panel, window, cx| panel.rename(&Rename, window, cx));
+    cx.run_until_parked();
+    let confirm = panel.update_in(cx, |panel, window, cx| {
+        panel
+            .filename_editor
+            .update(cx, |editor, cx| editor.set_text("b.txt", window, cx));
+        panel.confirm_edit(true, window, cx).unwrap()
+    });
+    confirm.await.unwrap();
+    cx.run_until_parked();
+
+    assert!(find_project_entry(&panel, "root/b.txt", cx).is_some());
+
+    // Operation 2: create c.txt.
+    select_path(&panel, "root", cx);
+    panel.update_in(cx, |panel, window, cx| panel.new_file(&NewFile, window, cx));
+    cx.run_until_parked();
+    let confirm = panel.update_in(cx, |panel, window, cx| {
+        panel
+            .filename_editor
+            .update(cx, |editor, cx| editor.set_text("c.txt", window, cx));
+        panel.confirm_edit(true, window, cx).unwrap()
+    });
+    confirm.await.unwrap();
+    cx.run_until_parked();
+
+    assert!(find_project_entry(&panel, "root/b.txt", cx).is_some());
+    assert!(find_project_entry(&panel, "root/c.txt", cx).is_some());
+
+    panel.update_in(cx, |panel, window, cx| {
+        panel.undo(&Undo, window, cx);
+    });
+    cx.run_until_parked();
+
+    assert_eq!(
+        find_project_entry(&panel, "root/c.txt", cx),
+        None,
+        "c.txt should be removed after first undo"
+    );
+    assert!(
+        find_project_entry(&panel, "root/b.txt", cx).is_some(),
+        "b.txt should still exist after first undo"
+    );
+
+    panel.update_in(cx, |panel, window, cx| {
+        panel.undo(&Undo, window, cx);
+    });
+    cx.run_until_parked();
+
+    assert!(
+        find_project_entry(&panel, "root/a.txt", cx).is_some(),
+        "a.txt should be restored after second undo"
+    );
+    assert_eq!(
+        find_project_entry(&panel, "root/b.txt", cx),
+        None,
+        "b.txt should no longer exist after second undo"
+    );
+}
+
+// Undo on an empty stack is a no-op: it must not panic or modify the tree.
+#[gpui::test]
+async fn test_undo_with_empty_stack(cx: &mut gpui::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        "/root",
+        json!({
+            "a.txt": "",
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
+    let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+    let workspace = window
+        .read_with(cx, |mw, _| mw.workspace().clone())
+        .unwrap();
+    let cx = &mut VisualTestContext::from_window(window.into(), cx);
+    let panel = workspace.update_in(cx, ProjectPanel::new);
+    cx.run_until_parked();
+
+    panel.update_in(cx, |panel, window, cx| {
+        panel.undo(&Undo, window, cx);
+    });
+    cx.run_until_parked();
+
+    assert!(
+        find_project_entry(&panel, "root/a.txt", cx).is_some(),
+        "File tree should be unchanged after undo on empty stack"
+    );
+}
+
+// A manually recorded batch (directory creation + file creation) is fully
+// reverted by a single undo.
+#[gpui::test]
+async fn test_undo_batch(cx: &mut gpui::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        "/root",
+        json!({
+            "src": {
+                "main.rs": "// Code!"
+            }
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
+    let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+    let workspace = window
+        .read_with(cx, |mw, _| mw.workspace().clone())
+        .unwrap();
+    let cx = &mut VisualTestContext::from_window(window.into(), cx);
+    let panel = workspace.update_in(cx, ProjectPanel::new);
+    let worktree_id = project.update(cx, |project, cx| {
+        project.visible_worktrees(cx).next().unwrap().read(cx).id()
+    });
+    cx.run_until_parked();
+
+    // Since there currently isn't a way to both create a folder and the file
+    // within it as two separate operations batched under the same
+    // `ProjectPanelOperation::Batch` operation, we'll simply record those
+    // ourselves, knowing that the filesystem already has the folder and file
+    // being provided in the operations.
+    //
+    // Operations are recorded in creation order — the directory first, then
+    // the file inside it — because `UndoManager::revert_operation` reverts a
+    // batch in *reverse* order. Recording the file first would make undo
+    // trash `src/` (taking `src/main.rs` with it) and then fail to find
+    // `src/main.rs`, surfacing a spurious error notification.
+    panel.update(cx, |panel, _cx| {
+        panel.undo_manager.record_batch(vec![
+            ProjectPanelOperation::Create {
+                project_path: ProjectPath {
+                    worktree_id,
+                    path: Arc::from(rel_path("src/")),
+                },
+            },
+            ProjectPanelOperation::Create {
+                project_path: ProjectPath {
+                    worktree_id,
+                    path: Arc::from(rel_path("src/main.rs")),
+                },
+            },
+        ]);
+    });
+
+    // Ensure that `src/main.rs` is present in the filesystem before proceeding,
+    // otherwise this test is irrelevant.
+    assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/src/main.rs"))]);
+    assert_eq!(
+        fs.directories(false),
+        vec![
+            PathBuf::from(path!("/")),
+            PathBuf::from(path!("/root/")),
+            PathBuf::from(path!("/root/src/"))
+        ]
+    );
+
+    panel.update_in(cx, |panel, window, cx| {
+        panel.undo(&Undo, window, cx);
+    });
+    cx.run_until_parked();
+
+    // One undo reverts the whole batch: both the file and the directory are gone.
+    assert_eq!(fs.files().len(), 0);
+    assert_eq!(
+        fs.directories(false),
+        vec![PathBuf::from(path!("/")), PathBuf::from(path!("/root/"))]
+    );
+}
+
+// Pasting OS-clipboard external paths copies the file into the selected
+// directory (with auto-open-on-drop disabled).
+#[gpui::test]
+async fn test_paste_external_paths(cx: &mut gpui::TestAppContext) {
+    init_test(cx);
+    set_auto_open_settings(
+        cx,
+        ProjectPanelAutoOpenSettings {
+            on_drop: Some(false),
+            ..Default::default()
+        },
+    );
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        path!("/root"),
+        json!({
+            "subdir": {}
+        }),
+    )
+    .await;
+
+    // A file that lives outside the project's worktree.
+    fs.insert_tree(
+        path!("/external"),
+        json!({
+            "new_file.rs": "fn main() {}"
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
+    let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+    let workspace = window
+        .read_with(cx, |mw, _| mw.workspace().clone())
+        .unwrap();
+    let cx = &mut VisualTestContext::from_window(window.into(), cx);
+    let panel = workspace.update_in(cx, ProjectPanel::new);
+    cx.run_until_parked();
+
+    cx.write_to_clipboard(ClipboardItem {
+        entries: vec![GpuiClipboardEntry::ExternalPaths(ExternalPaths(
+            smallvec::smallvec![PathBuf::from(path!("/external/new_file.rs"))],
+        ))],
+    });
+
+    select_path(&panel, "root/subdir", cx);
+    panel.update_in(cx, |panel, window, cx| {
+        panel.paste(&Default::default(), window, cx);
+    });
+    cx.executor().run_until_parked();
+
+    assert_eq!(
+        visible_entries_as_strings(&panel, 0..50, cx),
+        &[
+            "v root",
+            "    v subdir",
+            "          new_file.rs  <== selected",
+        ],
+    );
+}
+
+// Copy and cut should mirror the affected entry's path to the system
+// clipboard as text.
+#[gpui::test]
+async fn test_copy_and_cut_write_to_system_clipboard(cx: &mut gpui::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        path!("/root"),
+        json!({
+            "file_a.txt": "",
+            "file_b.txt": ""
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
+    let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+    let workspace = window
+        .read_with(cx, |mw, _| mw.workspace().clone())
+        .unwrap();
+    let cx = &mut VisualTestContext::from_window(window.into(), cx);
+    let panel = workspace.update_in(cx, ProjectPanel::new);
+    cx.run_until_parked();
+
+    select_path(&panel, "root/file_a.txt", cx);
+    panel.update_in(cx, |panel, window, cx| {
+        panel.copy(&Default::default(), window, cx);
+    });
+
+    let clipboard = cx
+        .read_from_clipboard()
+        .expect("clipboard should have content after copy");
+    let text = clipboard.text().expect("clipboard should contain text");
+    assert!(
+        text.contains("file_a.txt"),
+        "System clipboard should contain the copied file path, got: {text}"
+    );
+
+    select_path(&panel, "root/file_b.txt", cx);
+    panel.update_in(cx, |panel, window, cx| {
+        panel.cut(&Default::default(), window, cx);
+    });
+
+    let clipboard = cx
+        .read_from_clipboard()
+        .expect("clipboard should have content after cut");
+    let text = clipboard.text().expect("clipboard should contain text");
+    assert!(
+        text.contains("file_b.txt"),
+        "System clipboard should contain the cut file path, got: {text}"
+    );
+}
+
#[gpui::test]
async fn test_remove_opened_file(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
@@ -9726,7 +10424,7 @@ async fn run_create_file_in_folded_path_case(
}
}
-fn init_test(cx: &mut TestAppContext) {
+pub(crate) fn init_test(cx: &mut TestAppContext) {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
@@ -0,0 +1,286 @@
+use anyhow::anyhow;
+use gpui::{AppContext, SharedString, Task, WeakEntity};
+use project::ProjectPath;
+use std::collections::VecDeque;
+use ui::{App, IntoElement, Label, ParentElement, Styled, v_flex};
+use workspace::{
+ Workspace,
+ notifications::{NotificationId, simple_message_notification::MessageNotification},
+};
+
+/// Default cap on the number of operations kept on the undo stack.
+const MAX_UNDO_OPERATIONS: usize = 10_000;
+
+/// A reversible project-panel filesystem operation recorded for undo.
+#[derive(Clone)]
+pub enum ProjectPanelOperation {
+    /// Several operations recorded together; a single undo reverts them all
+    /// (in reverse order of recording).
+    Batch(Vec<ProjectPanelOperation>),
+    /// An entry was created at `project_path`; undone by trashing it.
+    Create {
+        project_path: ProjectPath,
+    },
+    /// An entry was renamed/moved from `old_path` to `new_path`; undone by
+    /// renaming it back.
+    Rename {
+        old_path: ProjectPath,
+        new_path: ProjectPath,
+    },
+}
+
+/// Records project-panel operations and reverts them LIFO on undo.
+pub struct UndoManager {
+    /// Used to reach the project (for reverting) and to surface error
+    /// notifications; held weakly to avoid a reference cycle.
+    workspace: WeakEntity<Workspace>,
+    /// LIFO stack of recorded operations; oldest entries are evicted from
+    /// the front once `limit` is reached.
+    stack: VecDeque<ProjectPanelOperation>,
+    /// Maximum number of operations to keep on the undo stack.
+    limit: usize,
+}
+
+impl UndoManager {
+    /// Creates a manager bound to `workspace` with the default stack limit.
+    pub fn new(workspace: WeakEntity<Workspace>) -> Self {
+        Self::new_with_limit(workspace, MAX_UNDO_OPERATIONS)
+    }
+
+    /// Creates a manager with an explicit stack limit (used by tests).
+    pub fn new_with_limit(workspace: WeakEntity<Workspace>, limit: usize) -> Self {
+        Self {
+            workspace,
+            limit,
+            stack: VecDeque::new(),
+        }
+    }
+
+    /// Whether there is at least one recorded operation to undo.
+    pub fn can_undo(&self) -> bool {
+        !self.stack.is_empty()
+    }
+
+    /// Pops the most recent operation and reverts it asynchronously; a no-op
+    /// when the stack is empty. Revert failures are surfaced to the user as a
+    /// workspace notification rather than returned.
+    pub fn undo(&mut self, cx: &mut App) {
+        if let Some(operation) = self.stack.pop_back() {
+            let task = self.revert_operation(operation, cx);
+            let workspace = self.workspace.clone();
+
+            cx.spawn(async move |cx| {
+                let errors = task.await;
+                if !errors.is_empty() {
+                    // NOTE(review): if `cx.update` returns a `Result` here its
+                    // value is the tail of this `if` — confirm against gpui's
+                    // async-context API that this is intentional best-effort.
+                    cx.update(|cx| {
+                        let messages = errors
+                            .iter()
+                            .map(|err| SharedString::from(err.to_string()))
+                            .collect();
+
+                        Self::show_errors(workspace, messages, cx)
+                    })
+                }
+            })
+            .detach();
+        }
+    }
+
+    /// Pushes `operation` onto the stack, evicting the oldest entry first
+    /// when the stack is at its limit.
+    pub fn record(&mut self, operation: ProjectPanelOperation) {
+        if self.stack.len() >= self.limit {
+            self.stack.pop_front();
+        }
+
+        self.stack.push_back(operation);
+    }
+
+    /// Records several operations as a single undo step: zero operations are
+    /// ignored, a single operation is recorded directly, and two or more are
+    /// wrapped in a [`ProjectPanelOperation::Batch`].
+    pub fn record_batch(&mut self, operations: impl IntoIterator<Item = ProjectPanelOperation>) {
+        let mut operations = operations.into_iter().collect::<Vec<_>>();
+        let operation = match operations.len() {
+            0 => return,
+            1 => operations.pop().unwrap(),
+            _ => ProjectPanelOperation::Batch(operations),
+        };
+
+        self.record(operation);
+    }
+
+    /// Attempts to revert the provided `operation`, returning a vector of errors
+    /// in case there was any failure while reverting the operation.
+    ///
+    /// For all operations other than [`crate::undo::ProjectPanelOperation::Batch`], a maximum
+    /// of one error is returned.
+    fn revert_operation(
+        &self,
+        operation: ProjectPanelOperation,
+        cx: &mut App,
+    ) -> Task<Vec<anyhow::Error>> {
+        match operation {
+            ProjectPanelOperation::Create { project_path } => {
+                let Some(workspace) = self.workspace.upgrade() else {
+                    return Task::ready(vec![anyhow!("Failed to obtain workspace.")]);
+                };
+
+                let result = workspace.update(cx, |workspace, cx| {
+                    workspace.project().update(cx, |project, cx| {
+                        let entry_id = project
+                            .entry_for_path(&project_path, cx)
+                            .map(|entry| entry.id)
+                            .ok_or_else(|| anyhow!("No entry for path."))?;
+
+                        // Second argument `true` => trash rather than
+                        // permanently delete (see the error message below).
+                        project
+                            .delete_entry(entry_id, true, cx)
+                            .ok_or_else(|| anyhow!("Failed to trash entry."))
+                    })
+                });
+
+                let task = match result {
+                    Ok(task) => task,
+                    Err(err) => return Task::ready(vec![err]),
+                };
+
+                cx.spawn(async move |_| match task.await {
+                    Ok(_) => vec![],
+                    Err(err) => vec![err],
+                })
+            }
+            ProjectPanelOperation::Rename { old_path, new_path } => {
+                let Some(workspace) = self.workspace.upgrade() else {
+                    return Task::ready(vec![anyhow!("Failed to obtain workspace.")]);
+                };
+
+                let result = workspace.update(cx, |workspace, cx| {
+                    workspace.project().update(cx, |project, cx| {
+                        // Look up the entry at its *current* (post-rename)
+                        // path, then rename it back to the original path.
+                        let entry_id = project
+                            .entry_for_path(&new_path, cx)
+                            .map(|entry| entry.id)
+                            .ok_or_else(|| anyhow!("No entry for path."))?;
+
+                        Ok(project.rename_entry(entry_id, old_path.clone(), cx))
+                    })
+                });
+
+                let task = match result {
+                    Ok(task) => task,
+                    Err(err) => return Task::ready(vec![err]),
+                };
+
+                cx.spawn(async move |_| match task.await {
+                    Ok(_) => vec![],
+                    Err(err) => vec![err],
+                })
+            }
+            ProjectPanelOperation::Batch(operations) => {
+                // When reverting operations in a batch, we reverse the order of
+                // operations to handle dependencies between them. For example,
+                // if a batch contains the following order of operations:
+                //
+                // 1. Create `src/`
+                // 2. Create `src/main.rs`
+                //
+                // If we first try to revert the directory creation, it would
+                // fail because there's still files inside the directory.
+                // Operations are also reverted sequentially in order to avoid
+                // this same problem.
+                let tasks: Vec<_> = operations
+                    .into_iter()
+                    .rev()
+                    .map(|operation| self.revert_operation(operation, cx))
+                    .collect();
+
+                cx.spawn(async move |_| {
+                    let mut errors = Vec::new();
+                    for task in tasks {
+                        errors.extend(task.await);
+                    }
+                    errors
+                })
+            }
+        }
+    }
+
+    /// Displays a notification with the list of provided errors ensuring that,
+    /// when more than one error is provided, which can be the case when dealing
+    /// with undoing a [`crate::undo::ProjectPanelOperation::Batch`], a list is
+    /// displayed with each of the errors, instead of a single message.
+    fn show_errors(workspace: WeakEntity<Workspace>, messages: Vec<SharedString>, cx: &mut App) {
+        workspace
+            .update(cx, move |workspace, cx| {
+                let notification_id =
+                    NotificationId::Named(SharedString::new_static("project_panel_undo"));
+
+                workspace.show_notification(notification_id, cx, move |cx| {
+                    cx.new(|cx| {
+                        if let [err] = messages.as_slice() {
+                            MessageNotification::new(err.to_string(), cx)
+                                .with_title("Failed to undo Project Panel Operation")
+                        } else {
+                            MessageNotification::new_from_builder(cx, move |_, _| {
+                                v_flex()
+                                    .gap_1()
+                                    .children(
+                                        messages
+                                            .iter()
+                                            .map(|message| Label::new(format!("- {message}"))),
+                                    )
+                                    .into_any_element()
+                            })
+                            .with_title("Failed to undo Project Panel Operations")
+                        }
+                    })
+                })
+            })
+            .ok();
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use crate::{
+        ProjectPanel, project_panel_tests,
+        undo::{ProjectPanelOperation, UndoManager},
+    };
+    use gpui::{Entity, TestAppContext, VisualTestContext};
+    use project::{FakeFs, Project, ProjectPath};
+    use std::sync::Arc;
+    use util::rel_path::rel_path;
+    use workspace::MultiWorkspace;
+
+    // Handles shared by the unit tests in this module.
+    struct TestContext {
+        project: Entity<Project>,
+        panel: Entity<ProjectPanel>,
+    }
+
+    /// Boots a test workspace with an empty project panel.
+    ///
+    /// NOTE(review): nothing is inserted into the FakeFs, so "/root" does not
+    /// exist on disk — presumably `Project::test` still yields a visible
+    /// worktree for the id lookup below; confirm.
+    async fn init_test(cx: &mut TestAppContext) -> TestContext {
+        project_panel_tests::init_test(cx);
+
+        let fs = FakeFs::new(cx.executor());
+        let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
+        let window =
+            cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+        let workspace = window
+            .read_with(cx, |mw, _| mw.workspace().clone())
+            .unwrap();
+        let cx = &mut VisualTestContext::from_window(window.into(), cx);
+        let panel = workspace.update_in(cx, ProjectPanel::new);
+        cx.run_until_parked();
+
+        TestContext { project, panel }
+    }
+
+    #[gpui::test]
+    async fn test_limit(cx: &mut TestAppContext) {
+        let test_context = init_test(cx).await;
+        let worktree_id = test_context.project.update(cx, |project, cx| {
+            project.visible_worktrees(cx).next().unwrap().read(cx).id()
+        });
+
+        let build_create_operation = |file_name: &str| ProjectPanelOperation::Create {
+            project_path: ProjectPath {
+                path: Arc::from(rel_path(file_name)),
+                worktree_id,
+            },
+        };
+
+        // Since we're updating the `ProjectPanel`'s undo manager with one whose
+        // limit is 3 operations, we only need to create 4 operations which
+        // we'll record, in order to confirm that the oldest operation is
+        // evicted.
+        let operation_a = build_create_operation("file_a.txt");
+        let operation_b = build_create_operation("file_b.txt");
+        let operation_c = build_create_operation("file_c.txt");
+        let operation_d = build_create_operation("file_d.txt");
+
+        test_context.panel.update(cx, move |panel, _cx| {
+            panel.undo_manager = UndoManager::new_with_limit(panel.workspace.clone(), 3);
+            panel.undo_manager.record(operation_a);
+            panel.undo_manager.record(operation_b);
+            panel.undo_manager.record(operation_c);
+            panel.undo_manager.record(operation_d);
+
+            // Operation A (the oldest) should have been evicted.
+            assert_eq!(panel.undo_manager.stack.len(), 3);
+        });
+    }
+}
@@ -1,9 +1,10 @@
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
use dev_container::find_configs_in_snapshot;
use gpui::{SharedString, Window};
use project::{Project, WorktreeId};
use std::sync::LazyLock;
use ui::prelude::*;
+use util::ResultExt;
use util::rel_path::RelPath;
use workspace::Workspace;
use workspace::notifications::NotificationId;
@@ -61,7 +62,7 @@ pub fn suggest_on_worktree_updated(
let project_path = abs_path.to_string_lossy().to_string();
let key_for_dismiss = project_devcontainer_key(&project_path);
- let already_dismissed = KEY_VALUE_STORE
+ let already_dismissed = KeyValueStore::global(cx)
.read_kvp(&key_for_dismiss)
.ok()
.flatten()
@@ -98,9 +99,13 @@ pub fn suggest_on_worktree_updated(
.secondary_on_click({
move |_window, cx| {
let key = key_for_dismiss.clone();
- db::write_and_log(cx, move || {
- KEY_VALUE_STORE.write_kvp(key, "dismissed".to_string())
- });
+ let kvp = KeyValueStore::global(cx);
+ cx.background_spawn(async move {
+ kvp.write_kvp(key, "dismissed".to_string())
+ .await
+ .log_err();
+ })
+ .detach();
}
})
})
@@ -2,6 +2,7 @@ mod dev_container_suggest;
pub mod disconnected_overlay;
mod remote_connections;
mod remote_servers;
+pub mod sidebar_recent_projects;
mod ssh_config;
use std::{
@@ -46,7 +47,7 @@ use ui::{
use util::{ResultExt, paths::PathExt};
use workspace::{
HistoryManager, ModalView, MultiWorkspace, OpenOptions, OpenVisible, PathList,
- SerializedWorkspaceLocation, WORKSPACE_DB, Workspace, WorkspaceId,
+ SerializedWorkspaceLocation, Workspace, WorkspaceDb, WorkspaceId,
notifications::DetachAndPromptErr, with_active_or_new_workspace,
};
use zed_actions::{OpenDevContainer, OpenRecent, OpenRemote};
@@ -75,6 +76,7 @@ struct OpenFolderEntry {
enum ProjectPickerEntry {
Header(SharedString),
OpenFolder { index: usize, positions: Vec<usize> },
+ OpenProject(StringMatch),
RecentProject(StringMatch),
}
@@ -88,8 +90,9 @@ pub async fn get_recent_projects(
current_workspace_id: Option<WorkspaceId>,
limit: Option<usize>,
fs: Arc<dyn fs::Fs>,
+ db: &WorkspaceDb,
) -> Vec<RecentProjectEntry> {
- let workspaces = WORKSPACE_DB
+ let workspaces = db
.recent_workspaces_on_disk(fs.as_ref())
.await
.unwrap_or_default();
@@ -138,8 +141,8 @@ pub async fn get_recent_projects(
}
}
-pub async fn delete_recent_project(workspace_id: WorkspaceId) {
- let _ = WORKSPACE_DB.delete_workspace_by_id(workspace_id).await;
+/// Removes the given workspace's record from the workspace database.
+/// Best-effort: any deletion error is deliberately ignored.
+pub async fn delete_recent_project(workspace_id: WorkspaceId, db: &WorkspaceDb) {
+    let _ = db.delete_workspace_by_id(workspace_id).await;
}
fn get_open_folders(workspace: &Workspace, cx: &App) -> Vec<OpenFolderEntry> {
@@ -199,17 +202,19 @@ fn get_branch_for_worktree(
cx: &App,
) -> Option<SharedString> {
let worktree_abs_path = worktree.abs_path();
- for repo in repositories {
- let repo = repo.read(cx);
- if repo.work_directory_abs_path == worktree_abs_path
- || worktree_abs_path.starts_with(&*repo.work_directory_abs_path)
- {
- if let Some(branch) = &repo.branch {
- return Some(SharedString::from(branch.name().to_string()));
- }
- }
- }
- None
+ repositories
+ .iter()
+ .filter(|repo| {
+ let repo_path = &repo.read(cx).work_directory_abs_path;
+ *repo_path == worktree_abs_path || worktree_abs_path.starts_with(repo_path.as_ref())
+ })
+ .max_by_key(|repo| repo.read(cx).work_directory_abs_path.as_os_str().len())
+ .and_then(|repo| {
+ repo.read(cx)
+ .branch
+ .as_ref()
+ .map(|branch| SharedString::from(branch.name().to_string()))
+ })
}
pub fn init(cx: &mut App) {
@@ -338,19 +343,71 @@ pub fn init(cx: &mut App) {
cx.on_action(|open_recent: &OpenRecent, cx| {
let create_new_window = open_recent.create_new_window;
- with_active_or_new_workspace(cx, move |workspace, window, cx| {
- let Some(recent_projects) = workspace.active_modal::<RecentProjects>(cx) else {
- let focus_handle = workspace.focus_handle(cx);
- RecentProjects::open(workspace, create_new_window, window, focus_handle, cx);
- return;
- };
- recent_projects.update(cx, |recent_projects, cx| {
- recent_projects
- .picker
- .update(cx, |picker, cx| picker.cycle_selection(window, cx))
- });
- });
+ match cx
+ .active_window()
+ .and_then(|w| w.downcast::<MultiWorkspace>())
+ {
+ Some(multi_workspace) => {
+ cx.defer(move |cx| {
+ multi_workspace
+ .update(cx, |multi_workspace, window, cx| {
+ let sibling_workspace_ids: HashSet<WorkspaceId> = multi_workspace
+ .workspaces()
+ .iter()
+ .filter_map(|ws| ws.read(cx).database_id())
+ .collect();
+
+ let workspace = multi_workspace.workspace().clone();
+ workspace.update(cx, |workspace, cx| {
+ let Some(recent_projects) =
+ workspace.active_modal::<RecentProjects>(cx)
+ else {
+ let focus_handle = workspace.focus_handle(cx);
+ RecentProjects::open(
+ workspace,
+ create_new_window,
+ sibling_workspace_ids,
+ window,
+ focus_handle,
+ cx,
+ );
+ return;
+ };
+
+ recent_projects.update(cx, |recent_projects, cx| {
+ recent_projects
+ .picker
+ .update(cx, |picker, cx| picker.cycle_selection(window, cx))
+ });
+ });
+ })
+ .log_err();
+ });
+ }
+ None => {
+ with_active_or_new_workspace(cx, move |workspace, window, cx| {
+ let Some(recent_projects) = workspace.active_modal::<RecentProjects>(cx) else {
+ let focus_handle = workspace.focus_handle(cx);
+ RecentProjects::open(
+ workspace,
+ create_new_window,
+ HashSet::new(),
+ window,
+ focus_handle,
+ cx,
+ );
+ return;
+ };
+
+ recent_projects.update(cx, |recent_projects, cx| {
+ recent_projects
+ .picker
+ .update(cx, |picker, cx| picker.cycle_selection(window, cx))
+ });
+ });
+ }
+ }
});
cx.on_action(|open_remote: &OpenRemote, cx| {
let from_existing_connection = open_remote.from_existing_connection;
@@ -470,7 +527,7 @@ pub fn add_wsl_distro(
pub struct RecentProjects {
pub picker: Entity<Picker<RecentProjectsDelegate>>,
rem_width: f32,
- _subscription: Subscription,
+ _subscriptions: Vec<Subscription>,
}
impl ModalView for RecentProjects {
@@ -494,6 +551,7 @@ impl RecentProjects {
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
+ let style = delegate.style;
let picker = cx.new(|cx| {
Picker::list(delegate, window, cx)
.list_measure_all()
@@ -505,16 +563,32 @@ impl RecentProjects {
picker.delegate.focus_handle = picker_focus_handle;
});
- let _subscription = cx.subscribe(&picker, |_, _, _, cx| cx.emit(DismissEvent));
+ let mut subscriptions = vec![cx.subscribe(&picker, |_, _, _, cx| cx.emit(DismissEvent))];
+
+ if style == ProjectPickerStyle::Popover {
+ let picker_focus = picker.focus_handle(cx);
+ subscriptions.push(
+ cx.on_focus_out(&picker_focus, window, |this, _, window, cx| {
+ let submenu_focused = this.picker.update(cx, |picker, cx| {
+ picker.delegate.actions_menu_handle.is_focused(window, cx)
+ });
+ if !submenu_focused {
+ cx.emit(DismissEvent);
+ }
+ }),
+ );
+ }
// We do not want to block the UI on a potentially lengthy call to DB, so we're gonna swap
// out workspace locations once the future runs to completion.
+ let db = WorkspaceDb::global(cx);
cx.spawn_in(window, async move |this, cx| {
let Some(fs) = fs else { return };
- let workspaces = WORKSPACE_DB
+ let workspaces = db
.recent_workspaces_on_disk(fs.as_ref())
.await
.log_err()
.unwrap_or_default();
+ let workspaces = workspace::resolve_worktree_workspaces(workspaces, fs.as_ref()).await;
this.update_in(cx, move |this, window, cx| {
this.picker.update(cx, move |picker, cx| {
picker.delegate.set_workspaces(workspaces);
@@ -527,13 +601,14 @@ impl RecentProjects {
Self {
picker,
rem_width,
- _subscription,
+ _subscriptions: subscriptions,
}
}
pub fn open(
workspace: &mut Workspace,
create_new_window: bool,
+ sibling_workspace_ids: HashSet<WorkspaceId>,
window: &mut Window,
focus_handle: FocusHandle,
cx: &mut Context<Workspace>,
@@ -542,13 +617,14 @@ impl RecentProjects {
let open_folders = get_open_folders(workspace, cx);
let project_connection_options = workspace.project().read(cx).remote_connection_options(cx);
let fs = Some(workspace.app_state().fs.clone());
+
workspace.toggle_modal(window, cx, |window, cx| {
let delegate = RecentProjectsDelegate::new(
weak,
create_new_window,
focus_handle,
open_folders,
- HashSet::new(),
+ sibling_workspace_ids,
project_connection_options,
ProjectPickerStyle::Modal,
);
@@ -559,7 +635,7 @@ impl RecentProjects {
pub fn popover(
workspace: WeakEntity<Workspace>,
- excluded_workspace_ids: HashSet<WorkspaceId>,
+ sibling_workspace_ids: HashSet<WorkspaceId>,
create_new_window: bool,
focus_handle: FocusHandle,
window: &mut Window,
@@ -583,7 +659,7 @@ impl RecentProjects {
create_new_window,
focus_handle,
open_folders,
- excluded_workspace_ids,
+ sibling_workspace_ids,
project_connection_options,
ProjectPickerStyle::Popover,
);
@@ -631,7 +707,7 @@ impl Render for RecentProjects {
pub struct RecentProjectsDelegate {
workspace: WeakEntity<Workspace>,
open_folders: Vec<OpenFolderEntry>,
- excluded_workspace_ids: HashSet<WorkspaceId>,
+ sibling_workspace_ids: HashSet<WorkspaceId>,
workspaces: Vec<(
WorkspaceId,
SerializedWorkspaceLocation,
@@ -657,7 +733,7 @@ impl RecentProjectsDelegate {
create_new_window: bool,
focus_handle: FocusHandle,
open_folders: Vec<OpenFolderEntry>,
- excluded_workspace_ids: HashSet<WorkspaceId>,
+ sibling_workspace_ids: HashSet<WorkspaceId>,
project_connection_options: Option<RemoteConnectionOptions>,
style: ProjectPickerStyle,
) -> Self {
@@ -665,7 +741,7 @@ impl RecentProjectsDelegate {
Self {
workspace,
open_folders,
- excluded_workspace_ids,
+ sibling_workspace_ids,
workspaces: Vec::new(),
filtered_entries: Vec::new(),
selected_index: 0,
@@ -712,32 +788,14 @@ impl PickerDelegate for RecentProjectsDelegate {
window: &mut Window,
cx: &mut Context<Picker<Self>>,
) -> Div {
- let focus_handle = self.focus_handle.clone();
-
h_flex()
.flex_none()
.h_9()
- .pl_2p5()
- .pr_1p5()
+ .px_2p5()
.justify_between()
.border_b_1()
.border_color(cx.theme().colors().border_variant)
.child(editor.render(window, cx))
- .child(
- IconButton::new("add_folder", IconName::Plus)
- .icon_size(IconSize::Small)
- .tooltip(move |_, cx| {
- Tooltip::for_action_in(
- "Add Project to Workspace",
- &workspace::AddFolderToProject,
- &focus_handle,
- cx,
- )
- })
- .on_click(|_, window, cx| {
- window.dispatch_action(workspace::AddFolderToProject.boxed_clone(), cx)
- }),
- )
}
fn match_count(&self) -> usize {
@@ -760,7 +818,11 @@ impl PickerDelegate for RecentProjectsDelegate {
fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context<Picker<Self>>) -> bool {
matches!(
self.filtered_entries.get(ix),
- Some(ProjectPickerEntry::OpenFolder { .. } | ProjectPickerEntry::RecentProject(_))
+ Some(
+ ProjectPickerEntry::OpenFolder { .. }
+ | ProjectPickerEntry::OpenProject(_)
+ | ProjectPickerEntry::RecentProject(_)
+ )
)
}
@@ -795,6 +857,38 @@ impl PickerDelegate for RecentProjectsDelegate {
))
};
+ let sibling_candidates: Vec<_> = self
+ .workspaces
+ .iter()
+ .enumerate()
+ .filter(|(_, (id, _, _, _))| self.is_sibling_workspace(*id, cx))
+ .map(|(id, (_, _, paths, _))| {
+ let combined_string = paths
+ .ordered_paths()
+ .map(|path| path.compact().to_string_lossy().into_owned())
+ .collect::<Vec<_>>()
+ .join("");
+ StringMatchCandidate::new(id, &combined_string)
+ })
+ .collect();
+
+ let mut sibling_matches = smol::block_on(fuzzy::match_strings(
+ &sibling_candidates,
+ query,
+ smart_case,
+ true,
+ 100,
+ &Default::default(),
+ cx.background_executor().clone(),
+ ));
+ sibling_matches.sort_unstable_by(|a, b| {
+ b.score
+ .partial_cmp(&a.score)
+ .unwrap_or(std::cmp::Ordering::Equal)
+ .then_with(|| a.candidate_id.cmp(&b.candidate_id))
+ });
+
+ // Build candidates for recent projects (not current, not sibling, not open folder)
let recent_candidates: Vec<_> = self
.workspaces
.iter()
@@ -845,6 +939,33 @@ impl PickerDelegate for RecentProjectsDelegate {
}
}
+ let has_siblings_to_show = if is_empty_query {
+ !sibling_candidates.is_empty()
+ } else {
+ !sibling_matches.is_empty()
+ };
+
+ if has_siblings_to_show {
+ entries.push(ProjectPickerEntry::Header("This Window".into()));
+
+ if is_empty_query {
+ for (id, (workspace_id, _, _, _)) in self.workspaces.iter().enumerate() {
+ if self.is_sibling_workspace(*workspace_id, cx) {
+ entries.push(ProjectPickerEntry::OpenProject(StringMatch {
+ candidate_id: id,
+ score: 0.0,
+ positions: Vec::new(),
+ string: String::new(),
+ }));
+ }
+ }
+ } else {
+ for m in sibling_matches {
+ entries.push(ProjectPickerEntry::OpenProject(m));
+ }
+ }
+ }
+
let has_recent_to_show = if is_empty_query {
!recent_candidates.is_empty()
} else {
@@ -899,6 +1020,32 @@ impl PickerDelegate for RecentProjectsDelegate {
}
cx.emit(DismissEvent);
}
+ Some(ProjectPickerEntry::OpenProject(selected_match)) => {
+ let Some((workspace_id, _, _, _)) =
+ self.workspaces.get(selected_match.candidate_id)
+ else {
+ return;
+ };
+ let workspace_id = *workspace_id;
+
+ if let Some(handle) = window.window_handle().downcast::<MultiWorkspace>() {
+ cx.defer(move |cx| {
+ handle
+ .update(cx, |multi_workspace, _window, cx| {
+ let workspace = multi_workspace
+ .workspaces()
+ .iter()
+ .find(|ws| ws.read(cx).database_id() == Some(workspace_id))
+ .cloned();
+ if let Some(workspace) = workspace {
+ multi_workspace.activate(workspace, cx);
+ }
+ })
+ .log_err();
+ });
+ }
+ cx.emit(DismissEvent);
+ }
Some(ProjectPickerEntry::RecentProject(selected_match)) => {
let Some(workspace) = self.workspace.upgrade() else {
return;
@@ -1117,6 +1264,105 @@ impl PickerDelegate for RecentProjectsDelegate {
.into_any_element(),
)
}
+ ProjectPickerEntry::OpenProject(hit) => {
+ let (workspace_id, location, paths, _) = self.workspaces.get(hit.candidate_id)?;
+ let workspace_id = *workspace_id;
+ let ordered_paths: Vec<_> = paths
+ .ordered_paths()
+ .map(|p| p.compact().to_string_lossy().to_string())
+ .collect();
+ let tooltip_path: SharedString = match &location {
+ SerializedWorkspaceLocation::Remote(options) => {
+ let host = options.display_name();
+ if ordered_paths.len() == 1 {
+ format!("{} ({})", ordered_paths[0], host).into()
+ } else {
+ format!("{}\n({})", ordered_paths.join("\n"), host).into()
+ }
+ }
+ _ => ordered_paths.join("\n").into(),
+ };
+
+ let mut path_start_offset = 0;
+ let (match_labels, paths): (Vec<_>, Vec<_>) = paths
+ .ordered_paths()
+ .map(|p| p.compact())
+                .map(|path| {
+                    let (label, path_match) =
+                        highlights_for_path(path.as_ref(), &hit.positions, path_start_offset);
+                    path_start_offset += path_match.text.len();
+                    (label, path_match)
+                })
+ .unzip();
+
+ let prefix = match &location {
+ SerializedWorkspaceLocation::Remote(options) => {
+ Some(SharedString::from(options.display_name()))
+ }
+ _ => None,
+ };
+
+ let highlighted_match = HighlightedMatchWithPaths {
+ prefix,
+ match_label: HighlightedMatch::join(match_labels.into_iter().flatten(), ", "),
+ paths,
+ };
+
+ let icon = icon_for_remote_connection(match location {
+ SerializedWorkspaceLocation::Local => None,
+ SerializedWorkspaceLocation::Remote(options) => Some(options),
+ });
+
+ let secondary_actions = h_flex()
+ .gap_1()
+ .child(
+ IconButton::new("remove_open_project", IconName::Close)
+ .icon_size(IconSize::Small)
+ .tooltip(Tooltip::text("Remove Project from Window"))
+ .on_click(cx.listener(move |picker, _, window, cx| {
+ cx.stop_propagation();
+ window.prevent_default();
+ picker
+ .delegate
+ .remove_sibling_workspace(workspace_id, window, cx);
+ let query = picker.query(cx);
+ picker.update_matches(query, window, cx);
+ })),
+ )
+ .into_any_element();
+
+ Some(
+ ListItem::new(ix)
+ .toggle_state(selected)
+ .inset(true)
+ .spacing(ListItemSpacing::Sparse)
+ .child(
+ h_flex()
+ .id("open_project_info_container")
+ .gap_3()
+ .flex_grow()
+ .when(self.has_any_non_local_projects, |this| {
+ this.child(Icon::new(icon).color(Color::Muted))
+ })
+ .child({
+ let mut highlighted = highlighted_match;
+ if !self.render_paths {
+ highlighted.paths.clear();
+ }
+ highlighted.render(window, cx)
+ })
+ .tooltip(Tooltip::text(tooltip_path)),
+ )
+ .map(|el| {
+ if self.selected_index == ix {
+ el.end_slot(secondary_actions)
+ } else {
+ el.end_hover_slot(secondary_actions)
+ }
+ })
+ .into_any_element(),
+ )
+ }
ProjectPickerEntry::RecentProject(hit) => {
let popover_style = matches!(self.style, ProjectPickerStyle::Popover);
let (_, location, paths, _) = self.workspaces.get(hit.candidate_id)?;
@@ -1169,9 +1415,9 @@ impl PickerDelegate for RecentProjectsDelegate {
.gap_px()
.when(is_local, |this| {
this.child(
- IconButton::new("add_to_workspace", IconName::Plus)
+ IconButton::new("add_to_workspace", IconName::FolderPlus)
.icon_size(IconSize::Small)
- .tooltip(Tooltip::text("Add Project to Workspace"))
+ .tooltip(Tooltip::text("Add Project to this Workspace"))
.on_click({
let paths_to_add = paths_to_add.clone();
cx.listener(move |picker, _event, window, cx| {
@@ -1263,9 +1509,9 @@ impl PickerDelegate for RecentProjectsDelegate {
fn render_footer(&self, _: &mut Window, cx: &mut Context<Picker<Self>>) -> Option<AnyElement> {
let focus_handle = self.focus_handle.clone();
let popover_style = matches!(self.style, ProjectPickerStyle::Popover);
- let open_folder_section = matches!(
+ let is_already_open_entry = matches!(
self.filtered_entries.get(self.selected_index),
- Some(ProjectPickerEntry::OpenFolder { .. })
+ Some(ProjectPickerEntry::OpenFolder { .. } | ProjectPickerEntry::OpenProject(_))
);
if popover_style {
@@ -1319,7 +1565,7 @@ impl PickerDelegate for RecentProjectsDelegate {
.border_t_1()
.border_color(cx.theme().colors().border_variant)
.map(|this| {
- if open_folder_section {
+ if is_already_open_entry {
this.child(
Button::new("activate", "Activate")
.key_binding(KeyBinding::for_action_in(
@@ -1405,7 +1651,7 @@ impl PickerDelegate for RecentProjectsDelegate {
}
}
-fn icon_for_remote_connection(options: Option<&RemoteConnectionOptions>) -> IconName {
+pub(crate) fn icon_for_remote_connection(options: Option<&RemoteConnectionOptions>) -> IconName {
match options {
None => IconName::Screen,
Some(options) => match options {
@@ -1419,7 +1665,7 @@ fn icon_for_remote_connection(options: Option<&RemoteConnectionOptions>) -> Icon
}
// Compute the highlighted text for the name and path
-fn highlights_for_path(
+pub(crate) fn highlights_for_path(
path: &Path,
match_positions: &Vec<usize>,
path_start_offset: usize,
@@ -1518,16 +1764,16 @@ impl RecentProjectsDelegate {
.workspace
.upgrade()
.map(|ws| ws.read(cx).app_state().fs.clone());
+ let db = WorkspaceDb::global(cx);
cx.spawn_in(window, async move |this, cx| {
- WORKSPACE_DB
- .delete_workspace_by_id(workspace_id)
- .await
- .log_err();
+ db.delete_workspace_by_id(workspace_id).await.log_err();
let Some(fs) = fs else { return };
- let workspaces = WORKSPACE_DB
+ let workspaces = db
.recent_workspaces_on_disk(fs.as_ref())
.await
.unwrap_or_default();
+ let workspaces =
+ workspace::resolve_worktree_workspaces(workspaces, fs.as_ref()).await;
this.update_in(cx, move |picker, window, cx| {
picker.delegate.set_workspaces(workspaces);
picker
@@ -1548,15 +1794,36 @@ impl RecentProjectsDelegate {
}
}
+ fn remove_sibling_workspace(
+ &mut self,
+ workspace_id: WorkspaceId,
+ window: &mut Window,
+ cx: &mut Context<Picker<Self>>,
+ ) {
+ if let Some(handle) = window.window_handle().downcast::<MultiWorkspace>() {
+ cx.defer(move |cx| {
+ handle
+ .update(cx, |multi_workspace, window, cx| {
+ let index = multi_workspace
+ .workspaces()
+ .iter()
+ .position(|ws| ws.read(cx).database_id() == Some(workspace_id));
+ if let Some(index) = index {
+ multi_workspace.remove_workspace(index, window, cx);
+ }
+ })
+ .log_err();
+ });
+ }
+
+ self.sibling_workspace_ids.remove(&workspace_id);
+ }
+
fn is_current_workspace(
&self,
workspace_id: WorkspaceId,
cx: &mut Context<Picker<Self>>,
) -> bool {
- if self.excluded_workspace_ids.contains(&workspace_id) {
- return true;
- }
-
if let Some(workspace) = self.workspace.upgrade() {
let workspace = workspace.read(cx);
if Some(workspace_id) == workspace.database_id() {
@@ -1567,6 +1834,15 @@ impl RecentProjectsDelegate {
false
}
+ fn is_sibling_workspace(
+ &self,
+ workspace_id: WorkspaceId,
+ cx: &mut Context<Picker<Self>>,
+ ) -> bool {
+ self.sibling_workspace_ids.contains(&workspace_id)
+ && !self.is_current_workspace(workspace_id, cx)
+ }
+
fn is_open_folder(&self, paths: &PathList) -> bool {
if self.open_folders.is_empty() {
return false;
@@ -1589,7 +1865,9 @@ impl RecentProjectsDelegate {
paths: &PathList,
cx: &mut Context<Picker<Self>>,
) -> bool {
- !self.is_current_workspace(workspace_id, cx) && !self.is_open_folder(paths)
+ !self.is_current_workspace(workspace_id, cx)
+ && !self.is_sibling_workspace(workspace_id, cx)
+ && !self.is_open_folder(paths)
}
}
@@ -0,0 +1,424 @@
+use std::collections::HashSet;
+use std::sync::Arc;
+
+use chrono::{DateTime, Utc};
+use fuzzy::{StringMatch, StringMatchCandidate};
+use gpui::{
+ Action, AnyElement, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable,
+ Subscription, Task, WeakEntity, Window,
+};
+use picker::{
+ Picker, PickerDelegate,
+ highlighted_match_with_paths::{HighlightedMatch, HighlightedMatchWithPaths},
+};
+use remote::RemoteConnectionOptions;
+use settings::Settings;
+use ui::{KeyBinding, ListItem, ListItemSpacing, Tooltip, prelude::*};
+use ui_input::ErasedEditor;
+use util::{ResultExt, paths::PathExt};
+use workspace::{
+ MultiWorkspace, OpenOptions, PathList, SerializedWorkspaceLocation, Workspace, WorkspaceDb,
+ WorkspaceId, notifications::DetachAndPromptErr,
+};
+
+use crate::{highlights_for_path, icon_for_remote_connection, open_remote_project};
+
+pub struct SidebarRecentProjects {
+ pub picker: Entity<Picker<SidebarRecentProjectsDelegate>>,
+ _subscription: Subscription,
+}
+
+impl SidebarRecentProjects {
+ pub fn popover(
+ workspace: WeakEntity<Workspace>,
+ sibling_workspace_ids: HashSet<WorkspaceId>,
+ _focus_handle: FocusHandle,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> Entity<Self> {
+ let fs = workspace
+ .upgrade()
+ .map(|ws| ws.read(cx).app_state().fs.clone());
+
+ cx.new(|cx| {
+ let delegate = SidebarRecentProjectsDelegate {
+ workspace,
+ sibling_workspace_ids,
+ workspaces: Vec::new(),
+ filtered_workspaces: Vec::new(),
+ selected_index: 0,
+ has_any_non_local_projects: false,
+ focus_handle: cx.focus_handle(),
+ };
+
+ let picker: Entity<Picker<SidebarRecentProjectsDelegate>> = cx.new(|cx| {
+ Picker::list(delegate, window, cx)
+ .list_measure_all()
+ .show_scrollbar(true)
+ });
+
+ let picker_focus_handle = picker.focus_handle(cx);
+ picker.update(cx, |picker, _| {
+ picker.delegate.focus_handle = picker_focus_handle;
+ });
+
+ let _subscription =
+ cx.subscribe(&picker, |_this: &mut Self, _, _, cx| cx.emit(DismissEvent));
+
+ let db = WorkspaceDb::global(cx);
+ cx.spawn_in(window, async move |this, cx| {
+ let Some(fs) = fs else { return };
+ let workspaces = db
+ .recent_workspaces_on_disk(fs.as_ref())
+ .await
+ .log_err()
+ .unwrap_or_default();
+ let workspaces =
+ workspace::resolve_worktree_workspaces(workspaces, fs.as_ref()).await;
+ this.update_in(cx, move |this, window, cx| {
+ this.picker.update(cx, move |picker, cx| {
+ picker.delegate.set_workspaces(workspaces);
+ picker.update_matches(picker.query(cx), window, cx)
+ })
+ })
+ .ok();
+ })
+ .detach();
+
+ picker.focus_handle(cx).focus(window, cx);
+
+ Self {
+ picker,
+ _subscription,
+ }
+ })
+ }
+}
+
+impl EventEmitter<DismissEvent> for SidebarRecentProjects {}
+
+impl Focusable for SidebarRecentProjects {
+ fn focus_handle(&self, cx: &App) -> FocusHandle {
+ self.picker.focus_handle(cx)
+ }
+}
+
+impl Render for SidebarRecentProjects {
+ fn render(&mut self, _: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
+ v_flex()
+ .key_context("SidebarRecentProjects")
+ .w(rems(18.))
+ .child(self.picker.clone())
+ }
+}
+
+pub struct SidebarRecentProjectsDelegate {
+ workspace: WeakEntity<Workspace>,
+ sibling_workspace_ids: HashSet<WorkspaceId>,
+ workspaces: Vec<(
+ WorkspaceId,
+ SerializedWorkspaceLocation,
+ PathList,
+ DateTime<Utc>,
+ )>,
+ filtered_workspaces: Vec<StringMatch>,
+ selected_index: usize,
+ has_any_non_local_projects: bool,
+ focus_handle: FocusHandle,
+}
+
+impl SidebarRecentProjectsDelegate {
+ pub fn set_workspaces(
+ &mut self,
+ workspaces: Vec<(
+ WorkspaceId,
+ SerializedWorkspaceLocation,
+ PathList,
+ DateTime<Utc>,
+ )>,
+ ) {
+ self.has_any_non_local_projects = workspaces
+ .iter()
+ .any(|(_, location, _, _)| !matches!(location, SerializedWorkspaceLocation::Local));
+ self.workspaces = workspaces;
+ }
+}
+
+impl EventEmitter<DismissEvent> for SidebarRecentProjectsDelegate {}
+
+impl PickerDelegate for SidebarRecentProjectsDelegate {
+ type ListItem = AnyElement;
+
+ fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
+ "Search recent projects…".into()
+ }
+
+ fn render_editor(
+ &self,
+ editor: &Arc<dyn ErasedEditor>,
+ window: &mut Window,
+ cx: &mut Context<Picker<Self>>,
+ ) -> Div {
+ h_flex()
+ .flex_none()
+ .h_9()
+ .px_2p5()
+ .justify_between()
+ .border_b_1()
+ .border_color(cx.theme().colors().border_variant)
+ .child(editor.render(window, cx))
+ }
+
+ fn match_count(&self) -> usize {
+ self.filtered_workspaces.len()
+ }
+
+ fn selected_index(&self) -> usize {
+ self.selected_index
+ }
+
+ fn set_selected_index(
+ &mut self,
+ ix: usize,
+ _window: &mut Window,
+ _cx: &mut Context<Picker<Self>>,
+ ) {
+ self.selected_index = ix;
+ }
+
+ fn update_matches(
+ &mut self,
+ query: String,
+ _: &mut Window,
+ cx: &mut Context<Picker<Self>>,
+ ) -> Task<()> {
+ let query = query.trim_start();
+ let smart_case = query.chars().any(|c| c.is_uppercase());
+ let is_empty_query = query.is_empty();
+
+ let current_workspace_id = self
+ .workspace
+ .upgrade()
+ .and_then(|ws| ws.read(cx).database_id());
+
+ let candidates: Vec<_> = self
+ .workspaces
+ .iter()
+ .enumerate()
+ .filter(|(_, (id, _, _, _))| {
+ Some(*id) != current_workspace_id && !self.sibling_workspace_ids.contains(id)
+ })
+ .map(|(id, (_, _, paths, _))| {
+ let combined_string = paths
+ .ordered_paths()
+ .map(|path| path.compact().to_string_lossy().into_owned())
+ .collect::<Vec<_>>()
+ .join("");
+ StringMatchCandidate::new(id, &combined_string)
+ })
+ .collect();
+
+ if is_empty_query {
+ self.filtered_workspaces = candidates
+ .into_iter()
+ .map(|candidate| StringMatch {
+ candidate_id: candidate.id,
+ score: 0.0,
+ positions: Vec::new(),
+ string: candidate.string,
+ })
+ .collect();
+ } else {
+ let mut matches = smol::block_on(fuzzy::match_strings(
+ &candidates,
+ query,
+ smart_case,
+ true,
+ 100,
+ &Default::default(),
+ cx.background_executor().clone(),
+ ));
+ matches.sort_unstable_by(|a, b| {
+ b.score
+ .partial_cmp(&a.score)
+ .unwrap_or(std::cmp::Ordering::Equal)
+ .then_with(|| a.candidate_id.cmp(&b.candidate_id))
+ });
+ self.filtered_workspaces = matches;
+ }
+
+ self.selected_index = 0;
+ Task::ready(())
+ }
+
+ fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
+ let Some(hit) = self.filtered_workspaces.get(self.selected_index) else {
+ return;
+ };
+ let Some((_, location, candidate_workspace_paths, _)) =
+ self.workspaces.get(hit.candidate_id)
+ else {
+ return;
+ };
+
+ let Some(workspace) = self.workspace.upgrade() else {
+ return;
+ };
+
+ match location {
+ SerializedWorkspaceLocation::Local => {
+ if let Some(handle) = window.window_handle().downcast::<MultiWorkspace>() {
+ let paths = candidate_workspace_paths.paths().to_vec();
+ cx.defer(move |cx| {
+ if let Some(task) = handle
+ .update(cx, |multi_workspace, window, cx| {
+ multi_workspace.open_project(paths, window, cx)
+ })
+ .log_err()
+ {
+ task.detach_and_log_err(cx);
+ }
+ });
+ }
+ }
+ SerializedWorkspaceLocation::Remote(connection) => {
+ let mut connection = connection.clone();
+ workspace.update(cx, |workspace, cx| {
+ let app_state = workspace.app_state().clone();
+ let replace_window = window.window_handle().downcast::<MultiWorkspace>();
+ let open_options = OpenOptions {
+ replace_window,
+ ..Default::default()
+ };
+ if let RemoteConnectionOptions::Ssh(connection) = &mut connection {
+ crate::RemoteSettings::get_global(cx)
+ .fill_connection_options_from_settings(connection);
+ };
+ let paths = candidate_workspace_paths.paths().to_vec();
+ cx.spawn_in(window, async move |_, cx| {
+ open_remote_project(connection.clone(), paths, app_state, open_options, cx)
+ .await
+ })
+ .detach_and_prompt_err(
+ "Failed to open project",
+ window,
+ cx,
+ |_, _, _| None,
+ );
+ });
+ }
+ }
+ cx.emit(DismissEvent);
+ }
+
+ fn dismissed(&mut self, _window: &mut Window, _cx: &mut Context<Picker<Self>>) {}
+
+ fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option<SharedString> {
+ let text = if self.workspaces.is_empty() {
+ "Recently opened projects will show up here"
+ } else {
+ "No matches"
+ };
+ Some(text.into())
+ }
+
+ fn render_match(
+ &self,
+ ix: usize,
+ selected: bool,
+ window: &mut Window,
+ cx: &mut Context<Picker<Self>>,
+ ) -> Option<Self::ListItem> {
+ let hit = self.filtered_workspaces.get(ix)?;
+ let (_, location, paths, _) = self.workspaces.get(hit.candidate_id)?;
+
+ let ordered_paths: Vec<_> = paths
+ .ordered_paths()
+ .map(|p| p.compact().to_string_lossy().to_string())
+ .collect();
+
+ let tooltip_path: SharedString = match &location {
+ SerializedWorkspaceLocation::Remote(options) => {
+ let host = options.display_name();
+ if ordered_paths.len() == 1 {
+ format!("{} ({})", ordered_paths[0], host).into()
+ } else {
+ format!("{}\n({})", ordered_paths.join("\n"), host).into()
+ }
+ }
+ _ => ordered_paths.join("\n").into(),
+ };
+
+ let mut path_start_offset = 0;
+ let match_labels: Vec<_> = paths
+ .ordered_paths()
+ .map(|p| p.compact())
+ .map(|path| {
+ let (label, path_match) =
+ highlights_for_path(path.as_ref(), &hit.positions, path_start_offset);
+ path_start_offset += path_match.text.len();
+ label
+ })
+ .collect();
+
+ let prefix = match &location {
+ SerializedWorkspaceLocation::Remote(options) => {
+ Some(SharedString::from(options.display_name()))
+ }
+ _ => None,
+ };
+
+ let highlighted_match = HighlightedMatchWithPaths {
+ prefix,
+ match_label: HighlightedMatch::join(match_labels.into_iter().flatten(), ", "),
+ paths: Vec::new(),
+ };
+
+ let icon = icon_for_remote_connection(match location {
+ SerializedWorkspaceLocation::Local => None,
+ SerializedWorkspaceLocation::Remote(options) => Some(options),
+ });
+
+ Some(
+ ListItem::new(ix)
+ .toggle_state(selected)
+ .inset(true)
+ .spacing(ListItemSpacing::Sparse)
+ .child(
+ h_flex()
+ .gap_3()
+ .flex_grow()
+ .when(self.has_any_non_local_projects, |this| {
+ this.child(Icon::new(icon).color(Color::Muted))
+ })
+ .child(highlighted_match.render(window, cx)),
+ )
+ .tooltip(Tooltip::text(tooltip_path))
+ .into_any_element(),
+ )
+ }
+
+ fn render_footer(&self, _: &mut Window, cx: &mut Context<Picker<Self>>) -> Option<AnyElement> {
+ let focus_handle = self.focus_handle.clone();
+
+ Some(
+ v_flex()
+ .flex_1()
+ .p_1p5()
+ .gap_1()
+ .border_t_1()
+ .border_color(cx.theme().colors().border_variant)
+ .child({
+ let open_action = workspace::Open {
+ create_new_window: false,
+ };
+ Button::new("open_local_folder", "Add Local Project")
+ .key_binding(KeyBinding::for_action_in(&open_action, &focus_handle, cx))
+ .on_click(move |_, window, cx| {
+ window.dispatch_action(open_action.boxed_clone(), cx)
+ })
+ })
+ .into_any(),
+ )
+ }
+}
@@ -1,60 +1,116 @@
use std::collections::BTreeSet;
+const FILTERED_GIT_PROVIDER_HOSTNAMES: &[&str] = &[
+ "dev.azure.com",
+ "bitbucket.org",
+ "chromium.googlesource.com",
+ "codeberg.org",
+ "gitea.com",
+ "gitee.com",
+ "github.com",
+ "gist.github.com",
+ "gitlab.com",
+ "sourcehut.org",
+ "git.sr.ht",
+];
+
pub fn parse_ssh_config_hosts(config: &str) -> BTreeSet<String> {
- let mut hosts = BTreeSet::new();
- let mut needs_another_line = false;
+ parse_host_blocks(config)
+ .into_iter()
+ .flat_map(HostBlock::non_git_provider_hosts)
+ .collect()
+}
+
+struct HostBlock {
+ aliases: BTreeSet<String>,
+ hostname: Option<String>,
+}
+
+impl HostBlock {
+ fn non_git_provider_hosts(self) -> impl Iterator<Item = String> {
+        // Some(_) when the block declares a HostName; None means judge each alias itself.
+        let hostname_is_git_provider = self.hostname.as_deref().map(is_git_provider_domain);
+        self.aliases.into_iter().filter(move |alias| {
+            !hostname_is_git_provider.unwrap_or_else(|| is_git_provider_domain(alias))
+        })
+ }
+}
+
+fn parse_host_blocks(config: &str) -> Vec<HostBlock> {
+ let mut blocks = Vec::new();
+ let mut aliases = BTreeSet::new();
+ let mut hostname = None;
+ let mut needs_continuation = false;
+
for line in config.lines() {
let line = line.trim_start();
- if let Some(line) = line.strip_prefix("Host") {
- match line.chars().next() {
- Some('\\') => {
- needs_another_line = true;
- }
- Some('\n' | '\r') => {
- needs_another_line = false;
- }
- Some(c) if c.is_whitespace() => {
- parse_hosts_from(line, &mut hosts);
- }
- Some(_) | None => {
- needs_another_line = false;
- }
- };
-
- if needs_another_line {
- parse_hosts_from(line, &mut hosts);
- needs_another_line = line.trim_end().ends_with('\\');
- } else {
- needs_another_line = false;
+
+ if needs_continuation {
+ needs_continuation = line.trim_end().ends_with('\\');
+ parse_hosts(line, &mut aliases);
+ continue;
+ }
+
+ let Some((keyword, value)) = split_keyword_and_value(line) else {
+ continue;
+ };
+
+ if keyword.eq_ignore_ascii_case("host") {
+ if !aliases.is_empty() {
+ blocks.push(HostBlock { aliases, hostname });
+ aliases = BTreeSet::new();
+ hostname = None;
}
- } else if needs_another_line {
- needs_another_line = line.trim_end().ends_with('\\');
- parse_hosts_from(line, &mut hosts);
- } else {
- needs_another_line = false;
+ parse_hosts(value, &mut aliases);
+ needs_continuation = line.trim_end().ends_with('\\');
+ } else if keyword.eq_ignore_ascii_case("hostname") {
+ hostname = value.split_whitespace().next().map(ToOwned::to_owned);
}
}
- hosts
+ if !aliases.is_empty() {
+ blocks.push(HostBlock { aliases, hostname });
+ }
+
+ blocks
}
-fn parse_hosts_from(line: &str, hosts: &mut BTreeSet<String>) {
+fn parse_hosts(line: &str, hosts: &mut BTreeSet<String>) {
hosts.extend(
line.split_whitespace()
+ .map(|field| field.trim_end_matches('\\'))
.filter(|field| !field.starts_with("!"))
.filter(|field| !field.contains("*"))
+            // a lone "\\" continuation marker is trimmed to "" and dropped below
.filter(|field| !field.is_empty())
.map(|field| field.to_owned()),
);
}
+fn split_keyword_and_value(line: &str) -> Option<(&str, &str)> {
+ let keyword_end = line.find(char::is_whitespace).unwrap_or(line.len());
+ let keyword = &line[..keyword_end];
+ if keyword.is_empty() {
+ return None;
+ }
+
+ let value = line[keyword_end..].trim_start();
+ Some((keyword, value))
+}
+
+fn is_git_provider_domain(host: &str) -> bool {
+    // Case-insensitive compare without allocating a lowercase copy.
+    FILTERED_GIT_PROVIDER_HOSTNAMES.iter().any(|p| host.eq_ignore_ascii_case(p))
+}
+
#[cfg(test)]
mod tests {
use super::*;
+ use indoc::indoc;
#[test]
fn test_thank_you_bjorn3() {
- let hosts = "
+ let hosts = indoc! {"
Host *
AddKeysToAgent yes
UseKeychain yes
@@ -67,19 +123,20 @@ mod tests {
User not_me
Host something
- HostName whatever.tld
+ HostName whatever.tld
- Host linux bsd host3
- User bjorn
+ Host linux bsd host3
+ User bjorn
- Host rpi
- user rpi
- hostname rpi.local
+ Host rpi
+ user rpi
+ hostname rpi.local
- Host \
- somehost \
- anotherhost
- Hostname 192.168.3.3";
+ Host \\
+ somehost \\
+ anotherhost
+ Hostname 192.168.3.3
+ "};
let expected_hosts = BTreeSet::from_iter([
"something".to_owned(),
@@ -93,4 +150,68 @@ mod tests {
assert_eq!(expected_hosts, parse_ssh_config_hosts(hosts));
}
+
+ #[test]
+ fn filters_git_provider_domains_from_hostname() {
+ let hosts = indoc! {"
+ Host github-personal
+ HostName github.com
+
+ Host gitlab-work
+ HostName GITLAB.COM
+
+ Host local
+ HostName example.com
+ "};
+
+ assert_eq!(
+ BTreeSet::from_iter(["local".to_owned()]),
+ parse_ssh_config_hosts(hosts)
+ );
+ }
+
+ #[test]
+ fn falls_back_to_host_when_hostname_is_absent() {
+ let hosts = indoc! {"
+ Host github.com bitbucket.org keep-me
+ User git
+ "};
+
+ assert_eq!(
+ BTreeSet::from_iter(["keep-me".to_owned()]),
+ parse_ssh_config_hosts(hosts)
+ );
+ }
+
+ #[test]
+ fn does_not_fuzzy_match_host_aliases() {
+ let hosts = indoc! {"
+ Host GitHub GitLab Bitbucket GITHUB github
+ User git
+ "};
+
+ assert_eq!(
+ BTreeSet::from_iter([
+ "Bitbucket".to_owned(),
+ "GITHUB".to_owned(),
+ "GitHub".to_owned(),
+ "GitLab".to_owned(),
+ "github".to_owned(),
+ ]),
+ parse_ssh_config_hosts(hosts)
+ );
+ }
+
+ #[test]
+ fn uses_hostname_before_host_filtering() {
+ let hosts = indoc! {"
+ Host github.com keep-me
+ HostName example.com
+ "};
+
+ assert_eq!(
+ BTreeSet::from_iter(["github.com".to_owned(), "keep-me".to_owned()]),
+ parse_ssh_config_hosts(hosts)
+ );
+ }
}
@@ -12,7 +12,7 @@ workspace = true
path = "src/rope.rs"
[dependencies]
-arrayvec = "0.7.1"
+heapless.workspace = true
log.workspace = true
rayon.workspace = true
sum_tree.workspace = true
@@ -1,5 +1,5 @@
use crate::{OffsetUtf16, Point, PointUtf16, TextSummary, Unclipped};
-use arrayvec::ArrayString;
+use heapless::String as ArrayString;
use std::{cmp, ops::Range};
use sum_tree::Bias;
use unicode_segmentation::GraphemeCursor;
@@ -29,7 +29,7 @@ pub struct Chunk {
newlines: Bitmap,
/// If bit[i] is set, then the character at index i is an ascii tab.
tabs: Bitmap,
- pub text: ArrayString<MAX_BASE>,
+ pub text: ArrayString<MAX_BASE, u8>,
}
#[inline(always)]
@@ -47,7 +47,11 @@ impl Chunk {
#[inline(always)]
pub fn new(text: &str) -> Self {
- let text = ArrayString::from(text).unwrap();
+ let text = {
+ let mut buf = ArrayString::new();
+            buf.push_str(text).expect("chunk text must fit within MAX_BASE");
+ buf
+ };
const CHUNK_SIZE: usize = 8;
@@ -118,7 +122,7 @@ impl Chunk {
self.chars_utf16 |= slice.chars_utf16 << base_ix;
self.newlines |= slice.newlines << base_ix;
self.tabs |= slice.tabs << base_ix;
- self.text.push_str(slice.text);
+ self.text.push_str(slice.text).unwrap();
}
#[inline(always)]
@@ -137,9 +141,9 @@ impl Chunk {
self.newlines = slice.newlines | (self.newlines << shift);
self.tabs = slice.tabs | (self.tabs << shift);
- let mut new_text = ArrayString::<MAX_BASE>::new();
- new_text.push_str(slice.text);
- new_text.push_str(&self.text);
+ let mut new_text = ArrayString::<MAX_BASE, u8>::new();
+ new_text.push_str(slice.text).unwrap();
+ new_text.push_str(&self.text).unwrap();
self.text = new_text;
}
@@ -4,7 +4,7 @@ mod point;
mod point_utf16;
mod unclipped;
-use arrayvec::ArrayVec;
+use heapless::Vec as ArrayVec;
use rayon::iter::{IntoParallelIterator, ParallelIterator as _};
use std::{
cmp, fmt, io, mem,
@@ -184,7 +184,7 @@ impl Rope {
return self.push_large(text);
}
// 16 is enough as otherwise we will hit the branch above
- let mut new_chunks = ArrayVec::<_, NUM_CHUNKS>::new();
+ let mut new_chunks = ArrayVec::<_, NUM_CHUNKS, u8>::new();
while !text.is_empty() {
let mut split_ix = cmp::min(chunk::MAX_BASE, text.len());
@@ -192,7 +192,7 @@ impl Rope {
split_ix -= 1;
}
let (chunk, remainder) = text.split_at(split_ix);
- new_chunks.push(chunk);
+ new_chunks.push(chunk).unwrap();
text = remainder;
}
self.chunks
@@ -699,6 +699,10 @@ impl<'a> Cursor<'a> {
self.offset,
end_offset
);
+ assert!(
+ end_offset <= self.rope.len(),
+ "cannot summarize past end of rope"
+ );
self.chunks.seek_forward(&end_offset, Bias::Right);
self.offset = end_offset;
@@ -711,6 +715,10 @@ impl<'a> Cursor<'a> {
self.offset,
end_offset
);
+ assert!(
+ end_offset <= self.rope.len(),
+ "cannot summarize past end of rope"
+ );
let mut slice = Rope::new();
if let Some(start_chunk) = self.chunks.item() {
@@ -741,6 +749,10 @@ impl<'a> Cursor<'a> {
self.offset,
end_offset
);
+ assert!(
+ end_offset <= self.rope.len(),
+ "cannot summarize past end of rope"
+ );
let mut summary = D::zero(());
if let Some(start_chunk) = self.chunks.item() {
@@ -15,7 +15,7 @@ use picker::{Picker, PickerDelegate};
use platform_title_bar::PlatformTitleBar;
use release_channel::ReleaseChannel;
use rope::Rope;
-use settings::Settings;
+use settings::{ActionSequence, Settings};
use std::rc::Rc;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
@@ -1399,6 +1399,13 @@ impl Render for RulesLibrary {
v_flex()
.id("rules-library")
.key_context("RulesLibrary")
+ .on_action(
+ |action_sequence: &ActionSequence, window: &mut Window, cx: &mut App| {
+ for action in &action_sequence.0 {
+ window.dispatch_action(action.boxed_clone(), cx);
+ }
+ },
+ )
.on_action(cx.listener(|this, &NewRule, window, cx| this.new_rule(window, cx)))
.on_action(
cx.listener(|this, &DeleteRule, window, cx| {
@@ -31,6 +31,7 @@ futures.workspace = true
gpui.workspace = true
language.workspace = true
menu.workspace = true
+multi_buffer.workspace = true
project.workspace = true
serde.workspace = true
serde_json.workspace = true
@@ -6,8 +6,9 @@ use crate::{
ToggleCaseSensitive, ToggleRegex, ToggleReplace, ToggleSelection, ToggleWholeWord,
buffer_search::registrar::WithResultsOrExternalQuery,
search_bar::{
- ActionButtonState, alignment_element, filter_search_results_input, input_base_styles,
- render_action_button, render_text_input,
+ ActionButtonState, HistoryNavigationDirection, alignment_element,
+ filter_search_results_input, input_base_styles, render_action_button, render_text_input,
+ should_navigate_history,
},
};
use any_vec::AnyVec;
@@ -15,6 +16,7 @@ use collections::HashMap;
use editor::{
Editor, EditorSettings, MultiBufferOffset, SplittableEditor, ToggleSplitDiff,
actions::{Backtab, FoldAll, Tab, ToggleFoldAll, UnfoldAll},
+ scroll::Autoscroll,
};
use futures::channel::oneshot;
use gpui::{
@@ -337,13 +339,11 @@ impl Render for BufferSearchBar {
};
let query_column = input_style
- .child(
- div()
- .flex_1()
- .min_w(px(0.))
- .overflow_hidden()
- .child(render_text_input(&self.query_editor, color_override, cx)),
- )
+ .child(div().flex_1().min_w_0().py_1().child(render_text_input(
+ &self.query_editor,
+ color_override,
+ cx,
+ )))
.child(
h_flex()
.flex_none()
@@ -484,39 +484,42 @@ impl Render for BufferSearchBar {
.child(query_column)
.child(mode_column);
- let replace_line =
- should_show_replace_input.then(|| {
- let replace_column = input_base_styles(replacement_border)
- .child(render_text_input(&self.replacement_editor, None, cx));
- let focus_handle = self.replacement_editor.read(cx).focus_handle(cx);
-
- let replace_actions = h_flex()
- .min_w_64()
- .gap_1()
- .child(render_action_button(
- "buffer-search-replace-button",
- IconName::ReplaceNext,
- Default::default(),
- "Replace Next Match",
- &ReplaceNext,
- focus_handle.clone(),
- ))
- .child(render_action_button(
- "buffer-search-replace-button",
- IconName::ReplaceAll,
- Default::default(),
- "Replace All Matches",
- &ReplaceAll,
- focus_handle,
- ));
+ let replace_line = should_show_replace_input.then(|| {
+ let replace_column = input_base_styles(replacement_border).child(
+ div()
+ .flex_1()
+ .py_1()
+ .child(render_text_input(&self.replacement_editor, None, cx)),
+ );
+ let focus_handle = self.replacement_editor.read(cx).focus_handle(cx);
+
+ let replace_actions = h_flex()
+ .min_w_64()
+ .gap_1()
+ .child(render_action_button(
+ "buffer-search-replace-button",
+ IconName::ReplaceNext,
+ Default::default(),
+ "Replace Next Match",
+ &ReplaceNext,
+ focus_handle.clone(),
+ ))
+ .child(render_action_button(
+ "buffer-search-replace-button",
+ IconName::ReplaceAll,
+ Default::default(),
+ "Replace All Matches",
+ &ReplaceAll,
+ focus_handle,
+ ));
- h_flex()
- .w_full()
- .gap_2()
- .when(has_collapse_button, |this| this.child(alignment_element()))
- .child(replace_column)
- .child(replace_actions)
- });
+ h_flex()
+ .w_full()
+ .gap_2()
+ .when(has_collapse_button, |this| this.child(alignment_element()))
+ .child(replace_column)
+ .child(replace_actions)
+ });
let mut key_context = KeyContext::new_with_defaults();
key_context.add("BufferSearchBar");
@@ -831,13 +834,13 @@ impl BufferSearchBar {
cx: &mut Context<Self>,
) -> Self {
let query_editor = cx.new(|cx| {
- let mut editor = Editor::single_line(window, cx);
+ let mut editor = Editor::auto_height(1, 4, window, cx);
editor.set_use_autoclose(false);
editor
});
cx.subscribe_in(&query_editor, window, Self::on_query_editor_event)
.detach();
- let replacement_editor = cx.new(|cx| Editor::single_line(window, cx));
+ let replacement_editor = cx.new(|cx| Editor::auto_height(1, 4, window, cx));
cx.subscribe(&replacement_editor, Self::on_replacement_editor_event)
.detach();
@@ -973,7 +976,9 @@ impl BufferSearchBar {
if deploy.focus {
let mut handle = self.query_editor.focus_handle(cx);
let mut select_query = true;
- if deploy.replace_enabled && handle.is_focused(window) {
+
+ let has_seed_text = self.query_suggestion(window, cx).is_some();
+ if deploy.replace_enabled && has_seed_text {
handle = self.replacement_editor.focus_handle(cx);
select_query = false;
};
@@ -1186,6 +1191,7 @@ impl BufferSearchBar {
let len = query_buffer.len(cx);
query_buffer.edit([(MultiBufferOffset(0)..len, query)], None, cx);
});
+ query_editor.request_autoscroll(Autoscroll::fit(), cx);
});
self.set_search_options(options, cx);
self.clear_matches(window, cx);
@@ -1704,15 +1710,19 @@ impl BufferSearchBar {
window: &mut Window,
cx: &mut Context<Self>,
) {
+ if !should_navigate_history(&self.query_editor, HistoryNavigationDirection::Next, cx) {
+ cx.propagate();
+ return;
+ }
+
if let Some(new_query) = self
.search_history
.next(&mut self.search_history_cursor)
.map(str::to_string)
{
drop(self.search(&new_query, Some(self.search_options), false, window, cx));
- } else {
- self.search_history_cursor.reset();
- drop(self.search("", Some(self.search_options), false, window, cx));
+ } else if let Some(draft) = self.search_history_cursor.take_draft() {
+ drop(self.search(&draft, Some(self.search_options), false, window, cx));
}
}
@@ -1722,6 +1732,11 @@ impl BufferSearchBar {
window: &mut Window,
cx: &mut Context<Self>,
) {
+ if !should_navigate_history(&self.query_editor, HistoryNavigationDirection::Previous, cx) {
+ cx.propagate();
+ return;
+ }
+
if self.query(cx).is_empty()
&& let Some(new_query) = self
.search_history
@@ -1732,9 +1747,10 @@ impl BufferSearchBar {
return;
}
+ let current_query = self.query(cx);
if let Some(new_query) = self
.search_history
- .previous(&mut self.search_history_cursor)
+ .previous(&mut self.search_history_cursor, ¤t_query)
.map(str::to_string)
{
drop(self.search(&new_query, Some(self.search_options), false, window, cx));
@@ -2716,13 +2732,13 @@ mod tests {
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
- // Next history query after the latest should set the query to the empty string.
+ // Next history query after the latest should preserve the current query.
search_bar.update_in(cx, |search_bar, window, cx| {
search_bar.next_history_query(&NextHistoryQuery, window, cx);
});
cx.background_executor.run_until_parked();
search_bar.update(cx, |search_bar, cx| {
- assert_eq!(search_bar.query(cx), "");
+ assert_eq!(search_bar.query(cx), "c");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update_in(cx, |search_bar, window, cx| {
@@ -2730,17 +2746,17 @@ mod tests {
});
cx.background_executor.run_until_parked();
search_bar.update(cx, |search_bar, cx| {
- assert_eq!(search_bar.query(cx), "");
+ assert_eq!(search_bar.query(cx), "c");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
- // First previous query for empty current query should set the query to the latest.
+ // Previous query should navigate backwards through history.
search_bar.update_in(cx, |search_bar, window, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, window, cx);
});
cx.background_executor.run_until_parked();
search_bar.update(cx, |search_bar, cx| {
- assert_eq!(search_bar.query(cx), "c");
+ assert_eq!(search_bar.query(cx), "b");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
@@ -2750,7 +2766,7 @@ mod tests {
});
cx.background_executor.run_until_parked();
search_bar.update(cx, |search_bar, cx| {
- assert_eq!(search_bar.query(cx), "b");
+ assert_eq!(search_bar.query(cx), "a");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
@@ -2831,11 +2847,71 @@ mod tests {
});
cx.background_executor.run_until_parked();
search_bar.update(cx, |search_bar, cx| {
- assert_eq!(search_bar.query(cx), "");
+ assert_eq!(search_bar.query(cx), "ba");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
}
+ #[perf]
+ #[gpui::test]
+ async fn test_search_query_history_autoscroll(cx: &mut TestAppContext) {
+ let (_editor, search_bar, cx) = init_test(cx);
+
+ // Add a long multi-line query that exceeds the editor's max
+ // visible height (4 lines), then a short query.
+ let long_query = "line1\nline2\nline3\nline4\nline5\nline6";
+ search_bar
+ .update_in(cx, |search_bar, window, cx| {
+ search_bar.search(long_query, None, true, window, cx)
+ })
+ .await
+ .unwrap();
+ search_bar
+ .update_in(cx, |search_bar, window, cx| {
+ search_bar.search("short", None, true, window, cx)
+ })
+ .await
+ .unwrap();
+
+ // Navigate back to the long entry. Since "short" is single-line,
+ // the history navigation is allowed.
+ search_bar.update_in(cx, |search_bar, window, cx| {
+ search_bar.previous_history_query(&PreviousHistoryQuery, window, cx);
+ });
+ cx.background_executor.run_until_parked();
+ search_bar.update(cx, |search_bar, cx| {
+ assert_eq!(search_bar.query(cx), long_query);
+ });
+
+        // The cursor should be scrolled into view despite the content
+        // exceeding the editor's max visible height (the assertion below checks only the bottom bound).
+ search_bar.update_in(cx, |search_bar, window, cx| {
+ let snapshot = search_bar
+ .query_editor
+ .update(cx, |editor, cx| editor.snapshot(window, cx));
+ let cursor_row = search_bar
+ .query_editor
+ .read(cx)
+ .selections
+ .newest_display(&snapshot)
+ .head()
+ .row();
+ let scroll_top = search_bar
+ .query_editor
+ .update(cx, |editor, cx| editor.scroll_position(cx).y);
+ let visible_lines = search_bar
+ .query_editor
+ .read(cx)
+ .visible_line_count()
+ .unwrap_or(0.0);
+ let scroll_bottom = scroll_top + visible_lines;
+ assert!(
+ (cursor_row.0 as f64) < scroll_bottom,
+ "cursor row {cursor_row:?} should be visible (scroll range {scroll_top}..{scroll_bottom})"
+ );
+ });
+ }
+
#[perf]
#[gpui::test]
async fn test_replace_simple(cx: &mut TestAppContext) {
@@ -3114,6 +3190,47 @@ mod tests {
.await;
}
+ #[gpui::test]
+ async fn test_deploy_replace_focuses_replacement_editor(cx: &mut TestAppContext) {
+ init_globals(cx);
+ let (editor, search_bar, cx) = init_test(cx);
+
+ editor.update_in(cx, |editor, window, cx| {
+ editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
+ s.select_display_ranges([
+ DisplayPoint::new(DisplayRow(0), 8)..DisplayPoint::new(DisplayRow(0), 16)
+ ])
+ });
+ });
+
+ search_bar.update_in(cx, |search_bar, window, cx| {
+ search_bar.deploy(
+ &Deploy {
+ focus: true,
+ replace_enabled: true,
+ selection_search_enabled: false,
+ },
+ window,
+ cx,
+ );
+ });
+ cx.run_until_parked();
+
+ search_bar.update_in(cx, |search_bar, window, cx| {
+ assert!(
+ search_bar
+ .replacement_editor
+ .focus_handle(cx)
+ .is_focused(window),
+ "replacement editor should be focused when deploying replace with a selection",
+ );
+ assert!(
+ !search_bar.query_editor.focus_handle(cx).is_focused(window),
+ "search editor should not be focused when replacement editor is focused",
+ );
+ });
+ }
+
#[perf]
#[gpui::test]
async fn test_find_matches_in_selections_singleton_buffer_multiple_selections(
@@ -4,15 +4,15 @@ use crate::{
ToggleCaseSensitive, ToggleIncludeIgnored, ToggleRegex, ToggleReplace, ToggleWholeWord,
buffer_search::Deploy,
search_bar::{
- ActionButtonState, alignment_element, input_base_styles, render_action_button,
- render_text_input,
+ ActionButtonState, HistoryNavigationDirection, alignment_element, input_base_styles,
+ render_action_button, render_text_input, should_navigate_history,
},
};
use anyhow::Context as _;
use collections::HashMap;
use editor::{
- Anchor, Editor, EditorEvent, EditorSettings, MAX_TAB_TITLE_LEN, MultiBuffer, PathKey,
- SelectionEffects,
+ Anchor, Editor, EditorEvent, EditorSettings, ExcerptId, MAX_TAB_TITLE_LEN, MultiBuffer,
+ PathKey, SelectionEffects,
actions::{Backtab, FoldAll, SelectAll, Tab, UnfoldAll},
items::active_match_index,
multibuffer_context_lines,
@@ -27,6 +27,7 @@ use gpui::{
use itertools::Itertools;
use language::{Buffer, Language};
use menu::Confirm;
+use multi_buffer;
use project::{
Project, ProjectPath, SearchResults,
search::{SearchInputKind, SearchQuery},
@@ -239,6 +240,7 @@ pub struct ProjectSearch {
search_history_cursor: SearchHistoryCursor,
search_included_history_cursor: SearchHistoryCursor,
search_excluded_history_cursor: SearchHistoryCursor,
+ _excerpts_subscription: Subscription,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -264,6 +266,7 @@ pub struct ProjectSearchView {
excluded_files_editor: Entity<Editor>,
filters_enabled: bool,
replace_enabled: bool,
+ pending_replace_all: bool,
included_opened_only: bool,
regex_language: Option<Arc<Language>>,
_subscriptions: Vec<Subscription>,
@@ -283,10 +286,12 @@ pub struct ProjectSearchBar {
impl ProjectSearch {
pub fn new(project: Entity<Project>, cx: &mut Context<Self>) -> Self {
let capability = project.read(cx).capability();
+ let excerpts = cx.new(|_| MultiBuffer::new(capability));
+ let subscription = Self::subscribe_to_excerpts(&excerpts, cx);
Self {
project,
- excerpts: cx.new(|_| MultiBuffer::new(capability)),
+ excerpts,
pending_search: Default::default(),
match_ranges: Default::default(),
active_query: None,
@@ -297,27 +302,85 @@ impl ProjectSearch {
search_history_cursor: Default::default(),
search_included_history_cursor: Default::default(),
search_excluded_history_cursor: Default::default(),
+ _excerpts_subscription: subscription,
}
}
fn clone(&self, cx: &mut Context<Self>) -> Entity<Self> {
- cx.new(|cx| Self {
- project: self.project.clone(),
- excerpts: self
+ cx.new(|cx| {
+ let excerpts = self
.excerpts
- .update(cx, |excerpts, cx| cx.new(|cx| excerpts.clone(cx))),
- pending_search: Default::default(),
- match_ranges: self.match_ranges.clone(),
- active_query: self.active_query.clone(),
- last_search_query_text: self.last_search_query_text.clone(),
- search_id: self.search_id,
- no_results: self.no_results,
- limit_reached: self.limit_reached,
- search_history_cursor: self.search_history_cursor.clone(),
- search_included_history_cursor: self.search_included_history_cursor.clone(),
- search_excluded_history_cursor: self.search_excluded_history_cursor.clone(),
+ .update(cx, |excerpts, cx| cx.new(|cx| excerpts.clone(cx)));
+ let subscription = Self::subscribe_to_excerpts(&excerpts, cx);
+
+ Self {
+ project: self.project.clone(),
+ excerpts,
+ pending_search: Default::default(),
+ match_ranges: self.match_ranges.clone(),
+ active_query: self.active_query.clone(),
+ last_search_query_text: self.last_search_query_text.clone(),
+ search_id: self.search_id,
+ no_results: self.no_results,
+ limit_reached: self.limit_reached,
+ search_history_cursor: self.search_history_cursor.clone(),
+ search_included_history_cursor: self.search_included_history_cursor.clone(),
+ search_excluded_history_cursor: self.search_excluded_history_cursor.clone(),
+ _excerpts_subscription: subscription,
+ }
+ })
+ }
+ fn subscribe_to_excerpts(
+ excerpts: &Entity<MultiBuffer>,
+ cx: &mut Context<Self>,
+ ) -> Subscription {
+ cx.subscribe(excerpts, |this, _, event, cx| {
+ if matches!(event, multi_buffer::Event::FileHandleChanged) {
+ this.remove_deleted_buffers(cx);
+ }
})
}
+
+ fn remove_deleted_buffers(&mut self, cx: &mut Context<Self>) {
+ let (deleted_paths, removed_excerpt_ids) = {
+ let excerpts = self.excerpts.read(cx);
+ let deleted_paths: Vec<PathKey> = excerpts
+ .paths()
+ .filter(|path| {
+ excerpts.buffer_for_path(path, cx).is_some_and(|buffer| {
+ buffer
+ .read(cx)
+ .file()
+ .is_some_and(|file| file.disk_state().is_deleted())
+ })
+ })
+ .cloned()
+ .collect();
+
+ let removed_excerpt_ids: collections::HashSet<ExcerptId> = deleted_paths
+ .iter()
+ .flat_map(|path| excerpts.excerpts_for_path(path))
+ .collect();
+
+ (deleted_paths, removed_excerpt_ids)
+ };
+
+ if deleted_paths.is_empty() {
+ return;
+ }
+
+ self.excerpts.update(cx, |excerpts, cx| {
+ for path in deleted_paths {
+ excerpts.remove_excerpts_for_path(path, cx);
+ }
+ });
+
+ self.match_ranges
+ .retain(|range| !removed_excerpt_ids.contains(&range.start.excerpt_id));
+
+ cx.notify();
+ }
+
fn cursor(&self, kind: SearchInputKind) -> &SearchHistoryCursor {
match kind {
SearchInputKind::Query => &self.search_history_cursor,
@@ -735,6 +798,9 @@ impl ProjectSearchView {
}
fn replace_next(&mut self, _: &ReplaceNext, window: &mut Window, cx: &mut Context<Self>) {
+ if self.entity.read(cx).pending_search.is_some() {
+ return;
+ }
if let Some(last_search_query_text) = &self.entity.read(cx).last_search_query_text
&& self.query_editor.read(cx).text(cx) != *last_search_query_text
{
@@ -762,14 +828,24 @@ impl ProjectSearchView {
self.select_match(Direction::Next, window, cx)
}
}
+
fn replace_all(&mut self, _: &ReplaceAll, window: &mut Window, cx: &mut Context<Self>) {
- if let Some(last_search_query_text) = &self.entity.read(cx).last_search_query_text
- && self.query_editor.read(cx).text(cx) != *last_search_query_text
- {
- // search query has changed, restart search and bail
+ if self.entity.read(cx).pending_search.is_some() {
+ self.pending_replace_all = true;
+ return;
+ }
+ let query_text = self.query_editor.read(cx).text(cx);
+ let query_is_stale =
+ self.entity.read(cx).last_search_query_text.as_deref() != Some(query_text.as_str());
+ if query_is_stale {
+ self.pending_replace_all = true;
self.search(cx);
+ if self.entity.read(cx).pending_search.is_none() {
+ self.pending_replace_all = false;
+ }
return;
}
+ self.pending_replace_all = false;
if self.active_match_index.is_none() {
return;
}
@@ -858,7 +934,7 @@ impl ProjectSearchView {
}));
let query_editor = cx.new(|cx| {
- let mut editor = Editor::single_line(window, cx);
+ let mut editor = Editor::auto_height(1, 4, window, cx);
editor.set_placeholder_text("Search all files…", window, cx);
editor.set_text(query_text, window, cx);
editor
@@ -881,7 +957,7 @@ impl ProjectSearchView {
}),
);
let replacement_editor = cx.new(|cx| {
- let mut editor = Editor::single_line(window, cx);
+ let mut editor = Editor::auto_height(1, 4, window, cx);
editor.set_placeholder_text("Replace in project…", window, cx);
if let Some(text) = replacement_text {
editor.set_text(text, window, cx);
@@ -981,6 +1057,7 @@ impl ProjectSearchView {
excluded_files_editor,
filters_enabled,
replace_enabled: false,
+ pending_replace_all: false,
included_opened_only: false,
regex_language: None,
_subscriptions: subscriptions,
@@ -1474,8 +1551,9 @@ impl ProjectSearchView {
SearchInputKind::Exclude => &self.excluded_files_editor,
};
- editor.update(cx, |included_editor, cx| {
- included_editor.set_text(text, window, cx)
+ editor.update(cx, |editor, cx| {
+ editor.set_text(text, window, cx);
+ editor.request_autoscroll(Autoscroll::fit(), cx);
});
}
@@ -1521,6 +1599,10 @@ impl ProjectSearchView {
cx.emit(ViewEvent::UpdateTab);
cx.notify();
+
+ if self.pending_replace_all && self.entity.read(cx).pending_search.is_none() {
+ self.replace_all(&ReplaceAll, window, cx);
+ }
}
fn update_match_index(&mut self, cx: &mut Context<Self>) {
@@ -1916,6 +1998,11 @@ impl ProjectSearchBar {
),
] {
if editor.focus_handle(cx).is_focused(window) {
+ if !should_navigate_history(&editor, HistoryNavigationDirection::Next, cx) {
+ cx.propagate();
+ return;
+ }
+
let new_query = search_view.entity.update(cx, |model, cx| {
let project = model.project.clone();
@@ -1925,13 +2012,14 @@ impl ProjectSearchBar {
.next(model.cursor_mut(kind))
.map(str::to_string)
}) {
- new_query
+ Some(new_query)
} else {
- model.cursor_mut(kind).reset();
- String::new()
+ model.cursor_mut(kind).take_draft()
}
});
- search_view.set_search_editor(kind, &new_query, window, cx);
+ if let Some(new_query) = new_query {
+ search_view.set_search_editor(kind, &new_query, window, cx);
+ }
}
}
});
@@ -1958,6 +2046,15 @@ impl ProjectSearchBar {
),
] {
if editor.focus_handle(cx).is_focused(window) {
+ if !should_navigate_history(
+ &editor,
+ HistoryNavigationDirection::Previous,
+ cx,
+ ) {
+ cx.propagate();
+ return;
+ }
+
if editor.read(cx).text(cx).is_empty()
&& let Some(new_query) = search_view
.entity
@@ -1972,12 +2069,13 @@ impl ProjectSearchBar {
return;
}
+ let current_query = editor.read(cx).text(cx);
if let Some(new_query) = search_view.entity.update(cx, |model, cx| {
let project = model.project.clone();
project.update(cx, |project, _| {
project
.search_history_mut(kind)
- .previous(model.cursor_mut(kind))
+ .previous(model.cursor_mut(kind), ¤t_query)
.map(str::to_string)
})
}) {
@@ -2076,7 +2174,11 @@ impl Render for ProjectSearchBar {
.on_action(
cx.listener(|this, action, window, cx| this.next_history_query(action, window, cx)),
)
- .child(render_text_input(&search.query_editor, color_override, cx))
+ .child(div().flex_1().py_1().child(render_text_input(
+ &search.query_editor,
+ color_override,
+ cx,
+ )))
.child(
h_flex()
.gap_1()
@@ -2234,18 +2336,22 @@ impl Render for ProjectSearchBar {
.child(mode_column);
let replace_line = search.replace_enabled.then(|| {
- let replace_column = input_base_styles(InputPanel::Replacement)
- .child(render_text_input(&search.replacement_editor, None, cx));
+ let replace_column = input_base_styles(InputPanel::Replacement).child(
+ div().flex_1().py_1().child(render_text_input(
+ &search.replacement_editor,
+ None,
+ cx,
+ )),
+ );
let focus_handle = search.replacement_editor.read(cx).focus_handle(cx);
-
let replace_actions = h_flex()
.min_w_64()
.gap_1()
.child(render_action_button(
"project-search-replace-button",
IconName::ReplaceNext,
- Default::default(),
+ is_search_underway.then_some(ActionButtonState::Disabled),
"Replace Next Match",
&ReplaceNext,
focus_handle.clone(),
@@ -2509,7 +2615,7 @@ pub mod tests {
use gpui::{Action, TestAppContext, VisualTestContext, WindowHandle};
use language::{FakeLspAdapter, rust_lang};
use pretty_assertions::assert_eq;
- use project::FakeFs;
+ use project::{FakeFs, Fs};
use serde_json::json;
use settings::{
InlayHintSettingsContent, SettingsStore, ThemeColorsContent, ThemeStyleContent,
@@ -3835,7 +3941,7 @@ pub mod tests {
})
.unwrap();
- // Next history query after the latest should set the query to the empty string.
+ // Next history query after the latest should preserve the current query.
window
.update(cx, |_, window, cx| {
search_bar.update(cx, |search_bar, cx| {
@@ -3847,7 +3953,10 @@ pub mod tests {
window
.update(cx, |_, _, cx| {
search_view.update(cx, |search_view, cx| {
- assert_eq!(search_view.query_editor.read(cx).text(cx), "");
+ assert_eq!(
+ search_view.query_editor.read(cx).text(cx),
+ "JUST_TEXT_INPUT"
+ );
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
})
@@ -3863,13 +3972,16 @@ pub mod tests {
window
.update(cx, |_, _, cx| {
search_view.update(cx, |search_view, cx| {
- assert_eq!(search_view.query_editor.read(cx).text(cx), "");
+ assert_eq!(
+ search_view.query_editor.read(cx).text(cx),
+ "JUST_TEXT_INPUT"
+ );
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
})
.unwrap();
- // First previous query for empty current query should set the query to the latest submitted one.
+ // Previous query should navigate backwards through history.
window
.update(cx, |_, window, cx| {
search_bar.update(cx, |search_bar, cx| {
@@ -3881,7 +3993,7 @@ pub mod tests {
window
.update(cx, |_, _, cx| {
search_view.update(cx, |search_view, cx| {
- assert_eq!(search_view.query_editor.read(cx).text(cx), "THREE");
+ assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
})
@@ -3899,7 +4011,7 @@ pub mod tests {
window
.update(cx, |_, _, cx| {
search_view.update(cx, |search_view, cx| {
- assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO");
+ assert_eq!(search_view.query_editor.read(cx).text(cx), "ONE");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
})
@@ -4053,11 +4165,75 @@ pub mod tests {
window
.update(cx, |_, _, cx| {
search_view.update(cx, |search_view, cx| {
- assert_eq!(search_view.query_editor.read(cx).text(cx), "");
+ assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO_NEW");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
})
.unwrap();
+
+ // Typing text without running a search, then navigating history, should allow
+ // restoring the draft when pressing next past the end.
+ window
+ .update(cx, |_, window, cx| {
+ search_view.update(cx, |search_view, cx| {
+ search_view.query_editor.update(cx, |query_editor, cx| {
+ query_editor.set_text("unsaved draft", window, cx)
+ });
+ })
+ })
+ .unwrap();
+ cx.background_executor.run_until_parked();
+
+ // Navigate up into history — the draft should be stashed.
+ window
+ .update(cx, |_, window, cx| {
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.focus_search(window, cx);
+ search_bar.previous_history_query(&PreviousHistoryQuery, window, cx);
+ });
+ })
+ .unwrap();
+ window
+ .update(cx, |_, _, cx| {
+ search_view.update(cx, |search_view, cx| {
+ assert_eq!(search_view.query_editor.read(cx).text(cx), "THREE");
+ });
+ })
+ .unwrap();
+
+ // Navigate forward through history.
+ window
+ .update(cx, |_, window, cx| {
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.focus_search(window, cx);
+ search_bar.next_history_query(&NextHistoryQuery, window, cx);
+ });
+ })
+ .unwrap();
+ window
+ .update(cx, |_, _, cx| {
+ search_view.update(cx, |search_view, cx| {
+ assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO_NEW");
+ });
+ })
+ .unwrap();
+
+ // Navigate past the end — the draft should be restored.
+ window
+ .update(cx, |_, window, cx| {
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.focus_search(window, cx);
+ search_bar.next_history_query(&NextHistoryQuery, window, cx);
+ });
+ })
+ .unwrap();
+ window
+ .update(cx, |_, _, cx| {
+ search_view.update(cx, |search_view, cx| {
+ assert_eq!(search_view.query_editor.read(cx).text(cx), "unsaved draft");
+ });
+ })
+ .unwrap();
}
#[perf]
@@ -4243,9 +4419,6 @@ pub mod tests {
cx.background_executor.run_until_parked();
select_next_history_item(&search_bar_2, cx);
- assert_eq!(active_query(&search_view_2, cx), "");
-
- select_prev_history_item(&search_bar_2, cx);
assert_eq!(active_query(&search_view_2, cx), "THREE");
select_prev_history_item(&search_bar_2, cx);
@@ -4257,6 +4430,9 @@ pub mod tests {
select_prev_history_item(&search_bar_2, cx);
assert_eq!(active_query(&search_view_2, cx), "ONE");
+ select_prev_history_item(&search_bar_2, cx);
+ assert_eq!(active_query(&search_view_2, cx), "ONE");
+
// Search view 1 should now see the query from search view 2.
assert_eq!(active_query(&search_view_1, cx), "ONE");
@@ -4268,7 +4444,7 @@ pub mod tests {
assert_eq!(active_query(&search_view_2, cx), "THREE");
select_next_history_item(&search_bar_2, cx);
- assert_eq!(active_query(&search_view_2, cx), "");
+ assert_eq!(active_query(&search_view_2, cx), "THREE");
select_next_history_item(&search_bar_1, cx);
assert_eq!(active_query(&search_view_1, cx), "TWO");
@@ -4277,7 +4453,7 @@ pub mod tests {
assert_eq!(active_query(&search_view_1, cx), "THREE");
select_next_history_item(&search_bar_1, cx);
- assert_eq!(active_query(&search_view_1, cx), "");
+ assert_eq!(active_query(&search_view_1, cx), "THREE");
}
#[perf]
@@ -4877,6 +5053,91 @@ pub mod tests {
.unwrap();
}
+ #[gpui::test]
+ async fn test_deleted_file_removed_from_search_results(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.background_executor.clone());
+ fs.insert_tree(
+ path!("/dir"),
+ json!({
+ "file_a.txt": "hello world",
+ "file_b.txt": "hello universe",
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
+ let window =
+ cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+ let workspace = window
+ .read_with(cx, |mw, _| mw.workspace().clone())
+ .unwrap();
+ let search = cx.new(|cx| ProjectSearch::new(project.clone(), cx));
+ let search_view = cx.add_window(|window, cx| {
+ ProjectSearchView::new(workspace.downgrade(), search.clone(), window, cx, None)
+ });
+
+ perform_search(search_view, "hello", cx);
+
+ search_view
+ .update(cx, |search_view, _window, cx| {
+ let match_count = search_view.entity.read(cx).match_ranges.len();
+ assert_eq!(match_count, 2, "Should have matches from both files");
+ })
+ .unwrap();
+
+ // Delete file_b.txt
+ fs.remove_file(
+ path!("/dir/file_b.txt").as_ref(),
+ fs::RemoveOptions::default(),
+ )
+ .await
+ .unwrap();
+ cx.run_until_parked();
+
+ // Verify deleted file's results are removed proactively
+ search_view
+ .update(cx, |search_view, _window, cx| {
+ let results_text = search_view
+ .results_editor
+ .update(cx, |editor, cx| editor.display_text(cx));
+ assert!(
+ !results_text.contains("universe"),
+ "Deleted file's content should be removed from results, got: {results_text}"
+ );
+ assert!(
+ results_text.contains("world"),
+ "Remaining file's content should still be present, got: {results_text}"
+ );
+ })
+ .unwrap();
+
+ // Re-run the search and verify deleted file stays gone
+ perform_search(search_view, "hello", cx);
+
+ search_view
+ .update(cx, |search_view, _window, cx| {
+ let results_text = search_view
+ .results_editor
+ .update(cx, |editor, cx| editor.display_text(cx));
+ assert!(
+ !results_text.contains("universe"),
+ "Deleted file should not reappear after re-search, got: {results_text}"
+ );
+ assert!(
+ results_text.contains("world"),
+ "Remaining file should still be found, got: {results_text}"
+ );
+ assert_eq!(
+ search_view.entity.read(cx).match_ranges.len(),
+ 1,
+ "Should only have match from the remaining file"
+ );
+ })
+ .unwrap();
+ }
+
fn init_test(cx: &mut TestAppContext) {
cx.update(|cx| {
let settings = SettingsStore::test(cx);
@@ -1,10 +1,37 @@
-use editor::{Editor, EditorElement, EditorStyle};
-use gpui::{Action, Entity, FocusHandle, Hsla, IntoElement, TextStyle};
+use editor::{Editor, EditorElement, EditorStyle, MultiBufferOffset, ToOffset};
+use gpui::{Action, App, Entity, FocusHandle, Hsla, IntoElement, TextStyle};
use settings::Settings;
use theme::ThemeSettings;
use ui::{IconButton, IconButtonShape};
use ui::{Tooltip, prelude::*};
+pub(super) enum HistoryNavigationDirection {
+ Previous,
+ Next,
+}
+
+pub(super) fn should_navigate_history(
+ editor: &Entity<Editor>,
+ direction: HistoryNavigationDirection,
+ cx: &App,
+) -> bool {
+ let editor_ref = editor.read(cx);
+ let snapshot = editor_ref.buffer().read(cx).snapshot(cx);
+ if snapshot.max_point().row == 0 {
+ return true;
+ }
+ let selections = editor_ref.selections.disjoint_anchors();
+ if let [selection] = selections {
+ let offset = selection.end.to_offset(&snapshot);
+ match direction {
+ HistoryNavigationDirection::Previous => offset == MultiBufferOffset(0),
+ HistoryNavigationDirection::Next => offset == snapshot.len(),
+ }
+ } else {
+ true
+ }
+}
+
pub(super) enum ActionButtonState {
Disabled,
Toggled,
@@ -43,7 +70,7 @@ pub(crate) fn input_base_styles(border_color: Hsla, map: impl FnOnce(Div) -> Div
h_flex()
.map(map)
.min_w_32()
- .h_8()
+ .min_h_8()
.pl_2()
.pr_1()
.border_1()
@@ -1,4 +1,4 @@
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
use gpui::{App, AppContext as _, Context, Subscription, Task, WindowId};
use util::ResultExt;
@@ -12,20 +12,19 @@ const SESSION_ID_KEY: &str = "session_id";
const SESSION_WINDOW_STACK_KEY: &str = "session_window_stack";
impl Session {
- pub async fn new(session_id: String) -> Self {
- let old_session_id = KEY_VALUE_STORE.read_kvp(SESSION_ID_KEY).ok().flatten();
+ pub async fn new(session_id: String, db: KeyValueStore) -> Self {
+ let old_session_id = db.read_kvp(SESSION_ID_KEY).ok().flatten();
- KEY_VALUE_STORE
- .write_kvp(SESSION_ID_KEY.to_string(), session_id.clone())
+ db.write_kvp(SESSION_ID_KEY.to_string(), session_id.clone())
.await
.log_err();
- let old_window_ids = KEY_VALUE_STORE
+ let old_window_ids = db
.read_kvp(SESSION_WINDOW_STACK_KEY)
.ok()
.flatten()
.and_then(|json| serde_json::from_str::<Vec<u64>>(&json).ok())
- .map(|vec| {
+ .map(|vec: Vec<u64>| {
vec.into_iter()
.map(WindowId::from)
.collect::<Vec<WindowId>>()
@@ -72,25 +71,28 @@ impl AppSession {
let _subscriptions = vec![cx.on_app_quit(Self::app_will_quit)];
#[cfg(not(any(test, feature = "test-support")))]
- let _serialization_task = cx.spawn(async move |_, cx| {
- // Disabled in tests: the infinite loop bypasses "parking forbidden" checks,
- // causing tests to hang instead of panicking.
- {
- let mut current_window_stack = Vec::new();
- loop {
- if let Some(windows) = cx.update(|cx| window_stack(cx))
- && windows != current_window_stack
- {
- store_window_stack(&windows).await;
- current_window_stack = windows;
+ let _serialization_task = {
+ let db = KeyValueStore::global(cx);
+ cx.spawn(async move |_, cx| {
+ // Disabled in tests: the infinite loop bypasses "parking forbidden" checks,
+ // causing tests to hang instead of panicking.
+ {
+ let mut current_window_stack = Vec::new();
+ loop {
+ if let Some(windows) = cx.update(|cx| window_stack(cx))
+ && windows != current_window_stack
+ {
+ store_window_stack(db.clone(), &windows).await;
+ current_window_stack = windows;
+ }
+
+ cx.background_executor()
+ .timer(std::time::Duration::from_millis(500))
+ .await;
}
-
- cx.background_executor()
- .timer(std::time::Duration::from_millis(500))
- .await;
}
- }
- });
+ })
+ };
#[cfg(any(test, feature = "test-support"))]
let _serialization_task = Task::ready(());
@@ -104,7 +106,8 @@ impl AppSession {
fn app_will_quit(&mut self, cx: &mut Context<Self>) -> Task<()> {
if let Some(window_stack) = window_stack(cx) {
- cx.background_spawn(async move { store_window_stack(&window_stack).await })
+ let db = KeyValueStore::global(cx);
+ cx.background_spawn(async move { store_window_stack(db, &window_stack).await })
} else {
Task::ready(())
}
@@ -137,10 +140,9 @@ fn window_stack(cx: &App) -> Option<Vec<u64>> {
)
}
-async fn store_window_stack(windows: &[u64]) {
+async fn store_window_stack(db: KeyValueStore, windows: &[u64]) {
if let Ok(window_ids_json) = serde_json::to_string(windows) {
- KEY_VALUE_STORE
- .write_kvp(SESSION_WINDOW_STACK_KEY.to_string(), window_ids_json)
+ db.write_kvp(SESSION_WINDOW_STACK_KEY.to_string(), window_ids_json)
.await
.log_err();
}
@@ -4,7 +4,7 @@ use fs::Fs;
use gpui::{
Action, ActionBuildError, App, InvalidKeystrokeError, KEYSTROKE_PARSE_EXPECTED_MESSAGE,
KeyBinding, KeyBindingContextPredicate, KeyBindingMetaIndex, KeybindingKeystroke, Keystroke,
- NoAction, SharedString, generate_list_of_all_registered_actions, register_action,
+ NoAction, SharedString, Unbind, generate_list_of_all_registered_actions, register_action,
};
use schemars::{JsonSchema, json_schema};
use serde::Deserialize;
@@ -73,6 +73,10 @@ pub struct KeymapSection {
/// on macOS. See the documentation for more details.
#[serde(default)]
use_key_equivalents: bool,
+    /// This keymap section's unbindings: a JSON object mapping keystrokes to the actions to unbind.
+    /// Unbindings are parsed before `bindings`, so later bindings in the same section take precedence.
+ #[serde(default)]
+ unbind: Option<IndexMap<String, UnbindTargetAction>>,
/// This keymap section's bindings, as a JSON object mapping keystrokes to actions. The
/// keystrokes key is a string representing a sequence of keystrokes to type, where the
/// keystrokes are separated by whitespace. Each keystroke is a sequence of modifiers (`ctrl`,
@@ -135,6 +139,20 @@ impl JsonSchema for KeymapAction {
}
}
+#[derive(Debug, Deserialize, Default, Clone)]
+#[serde(transparent)]
+pub struct UnbindTargetAction(Value);
+
+impl JsonSchema for UnbindTargetAction {
+ fn schema_name() -> Cow<'static, str> {
+ "UnbindTargetAction".into()
+ }
+
+ fn json_schema(_: &mut schemars::SchemaGenerator) -> schemars::Schema {
+ json_schema!(true)
+ }
+}
+
#[derive(Debug)]
#[must_use]
pub enum KeymapFileLoadResult {
@@ -231,6 +249,7 @@ impl KeymapFile {
for KeymapSection {
context,
use_key_equivalents,
+ unbind,
bindings,
unrecognized_fields,
} in keymap_file.0.iter()
@@ -244,7 +263,7 @@ impl KeymapFile {
// Leading space is to separate from the message indicating which section
// the error occurred in.
errors.push((
- context,
+ context.clone(),
format!(" Parse error in section `context` field: {}", err),
));
continue;
@@ -263,6 +282,38 @@ impl KeymapFile {
.unwrap();
}
+ if let Some(unbind) = unbind {
+ for (keystrokes, action) in unbind {
+ let result = Self::load_unbinding(
+ keystrokes,
+ action,
+ context_predicate.clone(),
+ *use_key_equivalents,
+ cx,
+ );
+ match result {
+ Ok(key_binding) => {
+ key_bindings.push(key_binding);
+ }
+ Err(err) => {
+ let mut lines = err.lines();
+ let mut indented_err = lines.next().unwrap().to_string();
+ for line in lines {
+ indented_err.push_str(" ");
+ indented_err.push_str(line);
+ indented_err.push_str("\n");
+ }
+ write!(
+ section_errors,
+ "\n\n- In unbind {}, {indented_err}",
+ MarkdownInlineCode(&format!("\"{}\"", keystrokes))
+ )
+ .unwrap();
+ }
+ }
+ }
+ }
+
if let Some(bindings) = bindings {
for (keystrokes, action) in bindings {
let result = Self::load_keybinding(
@@ -296,7 +347,7 @@ impl KeymapFile {
}
if !section_errors.is_empty() {
- errors.push((context, section_errors))
+ errors.push((context.clone(), section_errors))
}
}
@@ -332,7 +383,17 @@ impl KeymapFile {
use_key_equivalents: bool,
cx: &App,
) -> std::result::Result<KeyBinding, String> {
- let (action, action_input_string) = Self::build_keymap_action(action, cx)?;
+ Self::load_keybinding_action_value(keystrokes, &action.0, context, use_key_equivalents, cx)
+ }
+
+ fn load_keybinding_action_value(
+ keystrokes: &str,
+ action: &Value,
+ context: Option<Rc<KeyBindingContextPredicate>>,
+ use_key_equivalents: bool,
+ cx: &App,
+ ) -> std::result::Result<KeyBinding, String> {
+ let (action, action_input_string) = Self::build_keymap_action_value(action, cx)?;
let key_binding = match KeyBinding::load(
keystrokes,
@@ -362,23 +423,70 @@ impl KeymapFile {
}
}
+ fn load_unbinding(
+ keystrokes: &str,
+ action: &UnbindTargetAction,
+ context: Option<Rc<KeyBindingContextPredicate>>,
+ use_key_equivalents: bool,
+ cx: &App,
+ ) -> std::result::Result<KeyBinding, String> {
+ let key_binding = Self::load_keybinding_action_value(
+ keystrokes,
+ &action.0,
+ context,
+ use_key_equivalents,
+ cx,
+ )?;
+
+ if key_binding.action().partial_eq(&NoAction) {
+ return Err("expected action name string or [name, input] array.".to_string());
+ }
+
+ if key_binding.action().name() == Unbind::name_for_type() {
+ return Err(format!(
+ "can't use {} as an unbind target.",
+ MarkdownInlineCode(&format!("\"{}\"", Unbind::name_for_type()))
+ ));
+ }
+
+ KeyBinding::load(
+ keystrokes,
+ Box::new(Unbind(key_binding.action().name().into())),
+ key_binding.predicate(),
+ use_key_equivalents,
+ key_binding.action_input(),
+ cx.keyboard_mapper().as_ref(),
+ )
+ .map_err(|InvalidKeystrokeError { keystroke }| {
+ format!(
+ "invalid keystroke {}. {}",
+ MarkdownInlineCode(&format!("\"{}\"", &keystroke)),
+ KEYSTROKE_PARSE_EXPECTED_MESSAGE
+ )
+ })
+ }
+
pub fn parse_action(
action: &KeymapAction,
) -> Result<Option<(&String, Option<&Value>)>, String> {
- let name_and_input = match &action.0 {
+ Self::parse_action_value(&action.0)
+ }
+
+ fn parse_action_value(action: &Value) -> Result<Option<(&String, Option<&Value>)>, String> {
+ let name_and_input = match action {
Value::Array(items) => {
if items.len() != 2 {
return Err(format!(
"expected two-element array of `[name, input]`. \
Instead found {}.",
- MarkdownInlineCode(&action.0.to_string())
+ MarkdownInlineCode(&action.to_string())
));
}
let serde_json::Value::String(ref name) = items[0] else {
return Err(format!(
"expected two-element array of `[name, input]`, \
but the first element is not a string in {}.",
- MarkdownInlineCode(&action.0.to_string())
+ MarkdownInlineCode(&action.to_string())
));
};
Some((name, Some(&items[1])))
@@ -389,7 +497,7 @@ impl KeymapFile {
return Err(format!(
"expected two-element array of `[name, input]`. \
Instead found {}.",
- MarkdownInlineCode(&action.0.to_string())
+ MarkdownInlineCode(&action.to_string())
));
}
};
@@ -400,7 +508,14 @@ impl KeymapFile {
action: &KeymapAction,
cx: &App,
) -> std::result::Result<(Box<dyn Action>, Option<String>), String> {
- let (build_result, action_input_string) = match Self::parse_action(action)? {
+ Self::build_keymap_action_value(&action.0, cx)
+ }
+
+ fn build_keymap_action_value(
+ action: &Value,
+ cx: &App,
+ ) -> std::result::Result<(Box<dyn Action>, Option<String>), String> {
+ let (build_result, action_input_string) = match Self::parse_action_value(action)? {
Some((name, action_input)) if name.as_str() == ActionSequence::name_for_type() => {
match action_input {
Some(action_input) => (
@@ -583,9 +698,15 @@ impl KeymapFile {
"minItems": 2,
"maxItems": 2
});
- let mut keymap_action_alternatives = vec![empty_action_name, empty_action_name_with_input];
+ let mut keymap_action_alternatives = vec![
+ empty_action_name.clone(),
+ empty_action_name_with_input.clone(),
+ ];
+ let mut unbind_target_action_alternatives =
+ vec![empty_action_name, empty_action_name_with_input];
let mut empty_schema_action_names = vec![];
+ let mut empty_schema_unbind_target_action_names = vec![];
for (name, action_schema) in action_schemas.into_iter() {
let deprecation = if name == NoAction.name() {
Some("null")
@@ -593,6 +714,9 @@ impl KeymapFile {
deprecations.get(name).copied()
};
+ let include_in_unbind_target_schema =
+ name != NoAction.name() && name != Unbind::name_for_type();
+
// Add an alternative for plain action names.
let mut plain_action = json_schema!({
"type": "string",
@@ -607,7 +731,10 @@ impl KeymapFile {
if let Some(description) = &description {
add_description(&mut plain_action, description);
}
- keymap_action_alternatives.push(plain_action);
+ keymap_action_alternatives.push(plain_action.clone());
+ if include_in_unbind_target_schema {
+ unbind_target_action_alternatives.push(plain_action);
+ }
// Add an alternative for actions with data specified as a [name, data] array.
//
@@ -633,9 +760,15 @@ impl KeymapFile {
"minItems": 2,
"maxItems": 2
});
- keymap_action_alternatives.push(action_with_input);
+ keymap_action_alternatives.push(action_with_input.clone());
+ if include_in_unbind_target_schema {
+ unbind_target_action_alternatives.push(action_with_input);
+ }
} else {
empty_schema_action_names.push(name);
+ if include_in_unbind_target_schema {
+ empty_schema_unbind_target_action_names.push(name);
+ }
}
}
@@ -659,20 +792,44 @@ impl KeymapFile {
keymap_action_alternatives.push(actions_with_empty_input);
}
+ if !empty_schema_unbind_target_action_names.is_empty() {
+ let action_names = json_schema!({ "enum": empty_schema_unbind_target_action_names });
+ let no_properties_allowed = json_schema!({
+ "type": "object",
+ "additionalProperties": false
+ });
+ let mut actions_with_empty_input = json_schema!({
+ "type": "array",
+ "items": [action_names, no_properties_allowed],
+ "minItems": 2,
+ "maxItems": 2
+ });
+ add_deprecation(
+ &mut actions_with_empty_input,
+ "This action does not take input - just the action name string should be used."
+ .to_string(),
+ );
+ unbind_target_action_alternatives.push(actions_with_empty_input);
+ }
+
// Placing null first causes json-language-server to default assuming actions should be
// null, so place it last.
keymap_action_alternatives.push(json_schema!({
"type": "null"
}));
- // The `KeymapSection` schema will reference the `KeymapAction` schema by name, so setting
- // the definition of `KeymapAction` results in the full action schema being used.
generator.definitions_mut().insert(
KeymapAction::schema_name().to_string(),
json!({
"anyOf": keymap_action_alternatives
}),
);
+ generator.definitions_mut().insert(
+ UnbindTargetAction::schema_name().to_string(),
+ json!({
+ "anyOf": unbind_target_action_alternatives
+ }),
+ );
generator.root_schema_for::<KeymapFile>().to_value()
}
@@ -1260,7 +1417,8 @@ impl Action for ActionSequence {
#[cfg(test)]
mod tests {
- use gpui::{DummyKeyboardMapper, KeybindingKeystroke, Keystroke};
+ use gpui::{Action, App, DummyKeyboardMapper, KeybindingKeystroke, Keystroke, Unbind};
+ use serde_json::Value;
use unindent::Unindent;
use crate::{
@@ -1268,6 +1426,8 @@ mod tests {
keymap_file::{KeybindUpdateOperation, KeybindUpdateTarget},
};
+ gpui::actions!(test_keymap_file, [StringAction, InputAction]);
+
#[test]
fn can_deserialize_keymap_with_trailing_comma() {
let json = indoc::indoc! {"[
@@ -1283,6 +1443,191 @@ mod tests {
KeymapFile::parse(json).unwrap();
}
+ #[gpui::test]
+ fn keymap_section_unbinds_are_loaded_before_bindings(cx: &mut App) {
+ let key_bindings = match KeymapFile::load(
+ indoc::indoc! {r#"
+ [
+ {
+ "unbind": {
+ "ctrl-a": "test_keymap_file::StringAction",
+ "ctrl-b": ["test_keymap_file::InputAction", {}]
+ },
+ "bindings": {
+ "ctrl-c": "test_keymap_file::StringAction"
+ }
+ }
+ ]
+ "#},
+ cx,
+ ) {
+ crate::keymap_file::KeymapFileLoadResult::Success { key_bindings } => key_bindings,
+ crate::keymap_file::KeymapFileLoadResult::SomeFailedToLoad {
+ error_message, ..
+ } => {
+ panic!("{error_message}");
+ }
+ crate::keymap_file::KeymapFileLoadResult::JsonParseFailure { error } => {
+ panic!("JSON parse error: {error}");
+ }
+ };
+
+ assert_eq!(key_bindings.len(), 3);
+ assert!(
+ key_bindings[0]
+ .action()
+ .partial_eq(&Unbind("test_keymap_file::StringAction".into()))
+ );
+ assert_eq!(key_bindings[0].action_input(), None);
+ assert!(
+ key_bindings[1]
+ .action()
+ .partial_eq(&Unbind("test_keymap_file::InputAction".into()))
+ );
+ assert_eq!(
+ key_bindings[1]
+ .action_input()
+ .as_ref()
+ .map(ToString::to_string),
+ Some("{}".to_string())
+ );
+ assert_eq!(
+ key_bindings[2].action().name(),
+ "test_keymap_file::StringAction"
+ );
+ }
+
+ #[gpui::test]
+ fn keymap_unbind_loads_valid_target_action_with_input(cx: &mut App) {
+ let key_bindings = match KeymapFile::load(
+ indoc::indoc! {r#"
+ [
+ {
+ "unbind": {
+ "ctrl-a": ["test_keymap_file::InputAction", {}]
+ }
+ }
+ ]
+ "#},
+ cx,
+ ) {
+ crate::keymap_file::KeymapFileLoadResult::Success { key_bindings } => key_bindings,
+ other => panic!("expected Success, got {other:?}"),
+ };
+
+ assert_eq!(key_bindings.len(), 1);
+ assert!(
+ key_bindings[0]
+ .action()
+ .partial_eq(&Unbind("test_keymap_file::InputAction".into()))
+ );
+ assert_eq!(
+ key_bindings[0]
+ .action_input()
+ .as_ref()
+ .map(ToString::to_string),
+ Some("{}".to_string())
+ );
+ }
+
+ #[gpui::test]
+ fn keymap_unbind_rejects_null(cx: &mut App) {
+ match KeymapFile::load(
+ indoc::indoc! {r#"
+ [
+ {
+ "unbind": {
+ "ctrl-a": null
+ }
+ }
+ ]
+ "#},
+ cx,
+ ) {
+ crate::keymap_file::KeymapFileLoadResult::SomeFailedToLoad {
+ key_bindings,
+ error_message,
+ } => {
+ assert!(key_bindings.is_empty());
+ assert!(
+ error_message
+ .0
+ .contains("expected action name string or [name, input] array.")
+ );
+ }
+ other => panic!("expected SomeFailedToLoad, got {other:?}"),
+ }
+ }
+
+ #[gpui::test]
+ fn keymap_unbind_rejects_unbind_action(cx: &mut App) {
+ match KeymapFile::load(
+ indoc::indoc! {r#"
+ [
+ {
+ "unbind": {
+ "ctrl-a": ["zed::Unbind", "test_keymap_file::StringAction"]
+ }
+ }
+ ]
+ "#},
+ cx,
+ ) {
+ crate::keymap_file::KeymapFileLoadResult::SomeFailedToLoad {
+ key_bindings,
+ error_message,
+ } => {
+ assert!(key_bindings.is_empty());
+ assert!(
+ error_message
+ .0
+ .contains("can't use `\"zed::Unbind\"` as an unbind target.")
+ );
+ }
+ other => panic!("expected SomeFailedToLoad, got {other:?}"),
+ }
+ }
+
+ #[test]
+ fn keymap_schema_for_unbind_excludes_null_and_unbind_action() {
+ fn schema_allows(schema: &Value, expected: &Value) -> bool {
+ match schema {
+ Value::Object(object) => {
+ if object.get("const") == Some(expected) {
+ return true;
+ }
+ if object.get("type") == Some(&Value::String("null".to_string()))
+ && expected == &Value::Null
+ {
+ return true;
+ }
+ object.values().any(|value| schema_allows(value, expected))
+ }
+ Value::Array(items) => items.iter().any(|value| schema_allows(value, expected)),
+ _ => false,
+ }
+ }
+
+ let schema = KeymapFile::generate_json_schema_from_inventory();
+ let unbind_schema = schema
+ .pointer("/$defs/UnbindTargetAction")
+ .expect("missing UnbindTargetAction schema");
+
+ assert!(!schema_allows(unbind_schema, &Value::Null));
+ assert!(!schema_allows(
+ unbind_schema,
+ &Value::String(Unbind::name_for_type().to_string())
+ ));
+ assert!(schema_allows(
+ unbind_schema,
+ &Value::String("test_keymap_file::StringAction".to_string())
+ ));
+ assert!(schema_allows(
+ unbind_schema,
+ &Value::String("test_keymap_file::InputAction".to_string())
+ ));
+ }
+
#[track_caller]
fn check_keymap_update(
input: impl ToString,
@@ -640,6 +640,11 @@ pub struct GitPanelSettingsContent {
///
/// Default: false
pub show_count_badge: Option<bool>,
+
+ /// Whether the git panel should open on startup.
+ ///
+ /// Default: false
+ pub starts_open: Option<bool>,
}
#[derive(
@@ -99,8 +99,7 @@ pub(crate) fn render_edit_prediction_setup_page(
IconName::AiOpenAiCompat,
"OpenAI Compatible API",
ApiKeyDocs::Custom {
- message: "Set an API key here. It will be sent as Authorization: Bearer {key}."
- .into(),
+ message: "The API key sent as Authorization: Bearer {key}.".into(),
},
open_ai_compatible_api_token(cx),
|cx| open_ai_compatible_api_url(cx),
@@ -172,10 +171,12 @@ fn render_provider_dropdown(window: &mut Window, cx: &mut App) -> AnyElement {
h_flex()
.pt_2p5()
.w_full()
+ .min_w_0()
.justify_between()
.child(
v_flex()
.w_full()
+ .min_w_0()
.max_w_1_2()
.child(Label::new("Provider"))
.child(
@@ -246,13 +247,15 @@ fn render_api_key_provider(
.no_padding(true);
let button_link_label = format!("{} dashboard", title);
let description = match docs {
- ApiKeyDocs::Custom { message } => h_flex().min_w_0().gap_0p5().child(
+ ApiKeyDocs::Custom { message } => div().min_w_0().w_full().child(
Label::new(message)
.size(LabelSize::Small)
.color(Color::Muted),
),
ApiKeyDocs::Link { dashboard_url } => h_flex()
+ .w_full()
.min_w_0()
+ .flex_wrap()
.gap_0p5()
.child(
Label::new("Visit the")
@@ -300,10 +303,12 @@ fn render_api_key_provider(
h_flex()
.pt_2p5()
.w_full()
+ .min_w_0()
.justify_between()
.child(
v_flex()
.w_full()
+ .min_w_0()
.max_w_1_2()
.child(Label::new("API Key"))
.child(description)
@@ -466,7 +471,7 @@ fn ollama_settings() -> Box<[SettingsPageItem]> {
}),
SettingsPageItem::SettingItem(SettingItem {
title: "Prompt Format",
- description: "The prompt format to use when requesting predictions. Set to Infer to have the format inferred based on the model name",
+ description: "The prompt format to use when requesting predictions. Set to Infer to have the format inferred based on the model name.",
field: Box::new(SettingField {
pick: |settings| {
settings
@@ -597,7 +602,7 @@ fn open_ai_compatible_settings() -> Box<[SettingsPageItem]> {
}),
SettingsPageItem::SettingItem(SettingItem {
title: "Prompt Format",
- description: "The prompt format to use when requesting predictions. Set to Infer to have the format inferred based on the model name",
+ description: "The prompt format to use when requesting predictions. Set to Infer to have the format inferred based on the model name.",
field: Box::new(SettingField {
pick: |settings| {
settings
@@ -249,10 +249,13 @@ fn render_tool_list_item(
h_flex()
.w_full()
+ .min_w_0()
.py_3()
.justify_between()
.child(
v_flex()
+ .w_full()
+ .min_w_0()
.child(h_flex().gap_1().child(Label::new(tool.name)).when_some(
rule_summary,
|this, summary| {
@@ -1072,9 +1075,12 @@ fn render_global_default_mode_section(current_mode: ToolPermissionMode) -> AnyEl
h_flex()
.my_4()
+ .min_w_0()
.justify_between()
.child(
v_flex()
+ .w_full()
+ .min_w_0()
.child(Label::new("Default Permission"))
.child(
Label::new(
@@ -1125,13 +1131,18 @@ fn render_default_mode_section(
let tool_id_owned = tool_id.to_string();
h_flex()
+ .min_w_0()
.justify_between()
.child(
- v_flex().child(Label::new("Default Action")).child(
- Label::new("Action to take when no patterns match.")
- .size(LabelSize::Small)
- .color(Color::Muted),
- ),
+ v_flex()
+ .w_full()
+ .min_w_0()
+ .child(Label::new("Default Action"))
+ .child(
+ Label::new("Action to take when no patterns match.")
+ .size(LabelSize::Small)
+ .color(Color::Muted),
+ ),
)
.child(
PopoverMenu::new(format!("default-mode-{}", tool_id))
@@ -1410,6 +1421,9 @@ mod tests {
// Subagent permission checks happen at the level of individual
// tool calls within the subagent, not at the spawning level.
"spawn_agent",
+ // update_plan updates UI-visible planning state but does not use
+ // tool permission rules.
+ "update_plan",
];
let tool_info_ids: Vec<&str> = TOOLS.iter().map(|t| t.id).collect();
@@ -392,29 +392,22 @@ pub fn init(cx: &mut App) {
let queue = ProjectSettingsUpdateQueue::new(cx);
cx.set_global(queue);
+ cx.on_action(|_: &OpenSettings, cx| {
+ open_settings_editor(None, None, None, cx);
+ });
+
cx.observe_new(|workspace: &mut workspace::Workspace, _, _| {
workspace
- .register_action(
- |workspace, OpenSettingsAt { path }: &OpenSettingsAt, window, cx| {
- let window_handle = window
- .window_handle()
- .downcast::<MultiWorkspace>()
- .expect("Workspaces are root Windows");
- open_settings_editor(workspace, Some(&path), None, window_handle, cx);
- },
- )
- .register_action(|workspace, _: &OpenSettings, window, cx| {
- let window_handle = window
- .window_handle()
- .downcast::<MultiWorkspace>()
- .expect("Workspaces are root Windows");
- open_settings_editor(workspace, None, None, window_handle, cx);
+ .register_action(|_, OpenSettingsAt { path }: &OpenSettingsAt, window, cx| {
+ let window_handle = window.window_handle().downcast::<MultiWorkspace>();
+ open_settings_editor(Some(&path), None, window_handle, cx);
+ })
+ .register_action(|_, _: &OpenSettings, window, cx| {
+ let window_handle = window.window_handle().downcast::<MultiWorkspace>();
+ open_settings_editor(None, None, window_handle, cx);
})
.register_action(|workspace, _: &OpenProjectSettings, window, cx| {
- let window_handle = window
- .window_handle()
- .downcast::<MultiWorkspace>()
- .expect("Workspaces are root Windows");
+ let window_handle = window.window_handle().downcast::<MultiWorkspace>();
let target_worktree_id = workspace
.project()
.read(cx)
@@ -425,7 +418,7 @@ pub fn init(cx: &mut App) {
.is_dir()
.then_some(tree.read(cx).id())
});
- open_settings_editor(workspace, None, target_worktree_id, window_handle, cx);
+ open_settings_editor(None, target_worktree_id, window_handle, cx);
});
})
.detach();
@@ -564,10 +557,9 @@ fn init_renderers(cx: &mut App) {
}
pub fn open_settings_editor(
- _workspace: &mut Workspace,
path: Option<&str>,
target_worktree_id: Option<WorktreeId>,
- workspace_handle: WindowHandle<MultiWorkspace>,
+ workspace_handle: Option<WindowHandle<MultiWorkspace>>,
cx: &mut App,
) {
telemetry::event!("Settings Viewed");
@@ -624,7 +616,8 @@ pub fn open_settings_editor(
if let Some(existing_window) = existing_window {
existing_window
.update(cx, |settings_window, window, cx| {
- settings_window.original_window = Some(workspace_handle);
+ settings_window.original_window = workspace_handle;
+
window.activate_window();
if let Some(path) = path {
open_path(path, settings_window, window, cx);
@@ -685,7 +678,7 @@ pub fn open_settings_editor(
},
|window, cx| {
let settings_window =
- cx.new(|cx| SettingsWindow::new(Some(workspace_handle), window, cx));
+ cx.new(|cx| SettingsWindow::new(workspace_handle, window, cx));
settings_window.update(cx, |settings_window, cx| {
if let Some(path) = path {
open_path(&path, settings_window, window, cx);
@@ -2191,37 +2184,39 @@ impl SettingsWindow {
ui_files.reverse();
- let mut missing_worktrees = Vec::new();
+ if self.original_window.is_some() {
+ let mut missing_worktrees = Vec::new();
- for worktree in all_projects(self.original_window.as_ref(), cx)
- .flat_map(|project| project.read(cx).visible_worktrees(cx))
- .filter(|tree| !self.worktree_root_dirs.contains_key(&tree.read(cx).id()))
- {
- let worktree = worktree.read(cx);
- let worktree_id = worktree.id();
- let Some(directory_name) = worktree.root_dir().and_then(|file| {
- file.file_name()
- .map(|os_string| os_string.to_string_lossy().to_string())
- }) else {
- continue;
- };
+ for worktree in all_projects(self.original_window.as_ref(), cx)
+ .flat_map(|project| project.read(cx).visible_worktrees(cx))
+ .filter(|tree| !self.worktree_root_dirs.contains_key(&tree.read(cx).id()))
+ {
+ let worktree = worktree.read(cx);
+ let worktree_id = worktree.id();
+ let Some(directory_name) = worktree.root_dir().and_then(|file| {
+ file.file_name()
+ .map(|os_string| os_string.to_string_lossy().to_string())
+ }) else {
+ continue;
+ };
- missing_worktrees.push((worktree_id, directory_name.clone()));
- let path = RelPath::empty().to_owned().into_arc();
+ missing_worktrees.push((worktree_id, directory_name.clone()));
+ let path = RelPath::empty().to_owned().into_arc();
- let settings_ui_file = SettingsUiFile::Project((worktree_id, path));
+ let settings_ui_file = SettingsUiFile::Project((worktree_id, path));
- let focus_handle = prev_files
- .iter()
- .find_map(|(prev_file, handle)| {
- (prev_file == &settings_ui_file).then(|| handle.clone())
- })
- .unwrap_or_else(|| cx.focus_handle().tab_index(0).tab_stop(true));
+ let focus_handle = prev_files
+ .iter()
+ .find_map(|(prev_file, handle)| {
+ (prev_file == &settings_ui_file).then(|| handle.clone())
+ })
+ .unwrap_or_else(|| cx.focus_handle().tab_index(0).tab_stop(true));
- ui_files.push((settings_ui_file, focus_handle));
- }
+ ui_files.push((settings_ui_file, focus_handle));
+ }
- self.worktree_root_dirs.extend(missing_worktrees);
+ self.worktree_root_dirs.extend(missing_worktrees);
+ }
self.files = ui_files;
let current_file_still_exists = self
@@ -2883,7 +2878,7 @@ impl SettingsWindow {
}
fn render_sub_page_breadcrumbs(&self) -> impl IntoElement {
- h_flex().gap_1().children(
+ h_flex().min_w_0().gap_1().overflow_x_hidden().children(
itertools::intersperse(
std::iter::once(self.current_page().title.into()).chain(
self.sub_page_stack
@@ -3113,9 +3108,11 @@ impl SettingsWindow {
if let Some(current_sub_page) = self.sub_page_stack.last() {
page_header = h_flex()
.w_full()
+ .min_w_0()
.justify_between()
.child(
h_flex()
+ .min_w_0()
.ml_neg_1p5()
.gap_1()
.child(
@@ -3130,17 +3127,19 @@ impl SettingsWindow {
)
.when(current_sub_page.link.in_json, |this| {
this.child(
- Button::new("open-in-settings-file", "Edit in settings.json")
- .tab_index(0_isize)
- .style(ButtonStyle::OutlinedGhost)
- .tooltip(Tooltip::for_action_title_in(
- "Edit in settings.json",
- &OpenCurrentFile,
- &self.focus_handle,
- ))
- .on_click(cx.listener(|this, _, window, cx| {
- this.open_current_settings_file(window, cx);
- })),
+ div().flex_shrink_0().child(
+ Button::new("open-in-settings-file", "Edit in settings.json")
+ .tab_index(0_isize)
+ .style(ButtonStyle::OutlinedGhost)
+ .tooltip(Tooltip::for_action_title_in(
+ "Edit in settings.json",
+ &OpenCurrentFile,
+ &self.focus_handle,
+ ))
+ .on_click(cx.listener(|this, _, window, cx| {
+ this.open_current_settings_file(window, cx);
+ })),
+ ),
)
})
.into_any_element();
@@ -3310,6 +3309,7 @@ impl SettingsWindow {
.pt_6()
.gap_4()
.flex_1()
+ .min_w_0()
.bg(cx.theme().colors().editor_background)
.child(
v_flex()
@@ -25,6 +25,7 @@ chrono.workspace = true
editor.workspace = true
feature_flags.workspace = true
fs.workspace = true
+git.workspace = true
gpui.workspace = true
menu.workspace = true
project.workspace = true
@@ -33,6 +34,7 @@ settings.workspace = true
theme.workspace = true
ui.workspace = true
util.workspace = true
+vim_mode_setting.workspace = true
workspace.workspace = true
zed_actions.workspace = true
@@ -1,53 +1,55 @@
use acp_thread::ThreadStatus;
use action_log::DiffStats;
-use agent::ThreadStore;
use agent_client_protocol::{self as acp};
-use agent_ui::thread_metadata_store::{ThreadMetadata, ThreadMetadataStore};
-use agent_ui::threads_archive_view::{ThreadsArchiveView, ThreadsArchiveViewEvent};
+use agent_ui::thread_metadata_store::{SidebarThreadMetadataStore, ThreadMetadata};
+use agent_ui::threads_archive_view::{
+ ThreadsArchiveView, ThreadsArchiveViewEvent, format_history_entry_timestamp,
+};
use agent_ui::{Agent, AgentPanel, AgentPanelEvent, NewThread, RemoveSelectedThread};
use chrono::Utc;
use editor::Editor;
use feature_flags::{AgentV2FeatureFlag, FeatureFlagViewExt as _};
use gpui::{
Action as _, AnyElement, App, Context, Entity, FocusHandle, Focusable, ListState, Pixels,
- Render, SharedString, WeakEntity, Window, actions, list, prelude::*, px,
+ Render, SharedString, WeakEntity, Window, WindowHandle, list, prelude::*, px,
+};
+use menu::{
+ Cancel, Confirm, SelectChild, SelectFirst, SelectLast, SelectNext, SelectParent, SelectPrevious,
};
-use menu::{Cancel, Confirm, SelectFirst, SelectLast, SelectNext, SelectPrevious};
-use project::{AgentId, Event as ProjectEvent};
-use recent_projects::RecentProjects;
+use project::{AgentId, Event as ProjectEvent, linked_worktree_short_name};
+use recent_projects::sidebar_recent_projects::SidebarRecentProjects;
use ui::utils::platform_title_bar_height;
+use settings::Settings as _;
use std::collections::{HashMap, HashSet};
use std::mem;
use std::path::Path;
+use std::rc::Rc;
use std::sync::Arc;
use theme::ActiveTheme;
use ui::{
- AgentThreadStatus, ButtonStyle, CommonAnimationExt as _, HighlightedLabel, KeyBinding,
- ListItem, PopoverMenu, PopoverMenuHandle, Tab, ThreadItem, TintColor, Tooltip, WithScrollbar,
- prelude::*,
+ AgentThreadStatus, CommonAnimationExt, ContextMenu, Divider, HighlightedLabel, KeyBinding,
+ PopoverMenu, PopoverMenuHandle, Tab, ThreadItem, TintColor, Tooltip, WithScrollbar, prelude::*,
};
use util::ResultExt as _;
use util::path_list::PathList;
use workspace::{
- FocusWorkspaceSidebar, MultiWorkspace, MultiWorkspaceEvent, Sidebar as WorkspaceSidebar,
- ToggleWorkspaceSidebar, Workspace, WorkspaceId,
+ AddFolderToProject, FocusWorkspaceSidebar, MultiWorkspace, MultiWorkspaceEvent, Open,
+ Sidebar as WorkspaceSidebar, ToggleWorkspaceSidebar, Workspace, WorkspaceId,
};
use zed_actions::OpenRecent;
use zed_actions::editor::{MoveDown, MoveUp};
-actions!(
+use zed_actions::agents_sidebar::FocusSidebarFilter;
+
+gpui::actions!(
agents_sidebar,
[
- /// Collapses the selected entry in the workspace sidebar.
- CollapseSelectedEntry,
- /// Expands the selected entry in the workspace sidebar.
- ExpandSelectedEntry,
- /// Moves focus to the sidebar's search/filter editor.
- FocusSidebarFilter,
/// Creates a new thread in the currently selected or active project group.
NewThreadInGroup,
+ /// Toggles between the thread list and the archive view.
+ ToggleArchive,
]
);
@@ -56,11 +58,11 @@ const MIN_WIDTH: Pixels = px(200.0);
const MAX_WIDTH: Pixels = px(800.0);
const DEFAULT_THREADS_SHOWN: usize = 5;
-#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
+#[derive(Debug, Default)]
enum SidebarView {
#[default]
ThreadList,
- Archive,
+ Archive(Entity<ThreadsArchiveView>),
}
#[derive(Clone, Debug)]
@@ -107,6 +109,7 @@ struct ThreadEntry {
is_title_generating: bool,
highlight_positions: Vec<usize>,
worktree_name: Option<SharedString>,
+ worktree_full_path: Option<SharedString>,
worktree_highlight_positions: Vec<usize>,
diff_stats: DiffStats,
}
@@ -124,12 +127,12 @@ enum ListEntry {
Thread(ThreadEntry),
ViewMore {
path_list: PathList,
- remaining_count: usize,
is_fully_expanded: bool,
},
NewThread {
path_list: PathList,
workspace: Entity<Workspace>,
+ is_active_draft: bool,
},
}
@@ -144,6 +147,7 @@ struct SidebarContents {
entries: Vec<ListEntry>,
notified_threads: HashSet<acp::SessionId>,
project_header_indices: Vec<usize>,
+ has_open_projects: bool,
}
impl SidebarContents {
@@ -219,6 +223,10 @@ fn workspace_label_from_path_list(path_list: &PathList) -> SharedString {
}
}
+/// The sidebar re-derives its entire entry list from scratch on every
+/// change via `update_entries` → `rebuild_contents`. Avoid adding
+/// incremental or inter-event coordination state — if something can
+/// be computed from the current world state, compute it in the rebuild.
pub struct Sidebar {
multi_workspace: WeakEntity<MultiWorkspace>,
width: Pixels,
@@ -230,23 +238,20 @@ pub struct Sidebar {
///
/// Note: This is NOT the same as the active item.
selection: Option<usize>,
+ /// Derived from the active panel's thread in `rebuild_contents`.
+ /// Only updated when the panel returns `Some` — never cleared by
+ /// derivation, since the panel may transiently return `None` while
+ /// loading. User actions may write directly for immediate feedback.
focused_thread: Option<acp::SessionId>,
- /// Set to true when WorkspaceRemoved fires so the subsequent
- /// ActiveWorkspaceChanged event knows not to clear focused_thread.
- /// A workspace removal changes the active workspace as a side-effect, but
- /// that should not reset the user's thread focus the way an explicit
- /// workspace switch does.
- pending_workspace_removal: bool,
-
- active_entry_index: Option<usize>,
+ agent_panel_visible: bool,
+ active_thread_is_draft: bool,
hovered_thread_index: Option<usize>,
collapsed_groups: HashSet<PathList>,
expanded_groups: HashMap<PathList, usize>,
view: SidebarView,
- archive_view: Option<Entity<ThreadsArchiveView>>,
- recent_projects_popover_handle: PopoverMenuHandle<RecentProjects>,
+ recent_projects_popover_handle: PopoverMenuHandle<SidebarRecentProjects>,
+ project_header_menu_ix: Option<usize>,
_subscriptions: Vec<gpui::Subscription>,
- _update_entries_task: Option<gpui::Task<()>>,
_draft_observation: Option<gpui::Subscription>,
}
@@ -262,6 +267,7 @@ impl Sidebar {
let filter_editor = cx.new(|cx| {
let mut editor = Editor::single_line(window, cx);
+ editor.set_use_modal_editing(true);
editor.set_placeholder_text("Search…", window, cx);
editor
});
@@ -271,50 +277,15 @@ impl Sidebar {
window,
|this, _multi_workspace, event: &MultiWorkspaceEvent, window, cx| match event {
MultiWorkspaceEvent::ActiveWorkspaceChanged => {
- if mem::take(&mut this.pending_workspace_removal) {
- // If the removed workspace had no focused thread, seed
- // from the new active panel so its current thread gets
- // highlighted — same logic as subscribe_to_workspace.
- if this.focused_thread.is_none() {
- if let Some(mw) = this.multi_workspace.upgrade() {
- let ws = mw.read(cx).workspace();
- if let Some(panel) = ws.read(cx).panel::<AgentPanel>(cx) {
- this.focused_thread = panel
- .read(cx)
- .active_conversation()
- .and_then(|cv| cv.read(cx).parent_id(cx));
- }
- }
- }
- } else {
- // Seed focused_thread from the new active panel so
- // the sidebar highlights the correct thread.
- this.focused_thread = this
- .multi_workspace
- .upgrade()
- .and_then(|mw| {
- let ws = mw.read(cx).workspace();
- ws.read(cx).panel::<AgentPanel>(cx)
- })
- .and_then(|panel| {
- panel
- .read(cx)
- .active_conversation()
- .and_then(|cv| cv.read(cx).parent_id(cx))
- });
- }
this.observe_draft_editor(cx);
- this.update_entries(false, cx);
+ this.update_entries(cx);
}
MultiWorkspaceEvent::WorkspaceAdded(workspace) => {
this.subscribe_to_workspace(workspace, window, cx);
- this.update_entries(false, cx);
+ this.update_entries(cx);
}
MultiWorkspaceEvent::WorkspaceRemoved(_) => {
- // Signal that the upcoming ActiveWorkspaceChanged event is
- // a consequence of this removal, not a user workspace switch.
- this.pending_workspace_removal = true;
- this.update_entries(false, cx);
+ this.update_entries(cx);
}
},
)
@@ -326,18 +297,24 @@ impl Sidebar {
if !query.is_empty() {
this.selection.take();
}
- this.update_entries(!query.is_empty(), cx);
+ this.update_entries(cx);
+ if !query.is_empty() {
+ this.select_first_entry();
+ }
}
})
.detach();
- cx.observe(&ThreadMetadataStore::global(cx), |this, _store, cx| {
- this.update_entries(false, cx);
- })
+ cx.observe(
+ &SidebarThreadMetadataStore::global(cx),
+ |this, _store, cx| {
+ this.update_entries(cx);
+ },
+ )
.detach();
cx.observe_flag::<AgentV2FeatureFlag, _>(window, |_is_enabled, this, _window, cx| {
- this.update_entries(false, cx);
+ this.update_entries(cx);
})
.detach();
@@ -346,11 +323,10 @@ impl Sidebar {
for workspace in &workspaces {
this.subscribe_to_workspace(workspace, window, cx);
}
- this.update_entries(false, cx);
+ this.update_entries(cx);
});
Self {
- _update_entries_task: None,
multi_workspace: multi_workspace.downgrade(),
width: DEFAULT_WIDTH,
focus_handle,
@@ -359,19 +335,25 @@ impl Sidebar {
contents: SidebarContents::default(),
selection: None,
focused_thread: None,
- pending_workspace_removal: false,
- active_entry_index: None,
+ agent_panel_visible: false,
+ active_thread_is_draft: false,
hovered_thread_index: None,
collapsed_groups: HashSet::new(),
expanded_groups: HashMap::new(),
view: SidebarView::default(),
- archive_view: None,
recent_projects_popover_handle: PopoverMenuHandle::default(),
+ project_header_menu_ix: None,
_subscriptions: Vec::new(),
_draft_observation: None,
}
}
+ fn is_active_workspace(&self, workspace: &Entity<Workspace>, cx: &App) -> bool {
+ self.multi_workspace
+ .upgrade()
+ .map_or(false, |mw| mw.read(cx).workspace() == workspace)
+ }
+
fn subscribe_to_workspace(
&mut self,
workspace: &Entity<Workspace>,
@@ -386,7 +368,7 @@ impl Sidebar {
ProjectEvent::WorktreeAdded(_)
| ProjectEvent::WorktreeRemoved(_)
| ProjectEvent::WorktreeOrderChanged => {
- this.update_entries(false, cx);
+ this.update_entries(cx);
}
_ => {}
},
@@ -407,7 +389,7 @@ impl Sidebar {
)
) {
this.prune_stale_worktree_workspaces(window, cx);
- this.update_entries(false, cx);
+ this.update_entries(cx);
}
},
)
@@ -426,15 +408,13 @@ impl Sidebar {
)
.detach();
+ self.observe_docks(workspace, cx);
+
if let Some(agent_panel) = workspace.read(cx).panel::<AgentPanel>(cx) {
self.subscribe_to_agent_panel(&agent_panel, window, cx);
- // Seed the initial focused_thread so the correct thread item is
- // highlighted right away, without waiting for the panel to emit
- // an event (which only happens on *changes*, not on first load).
- self.focused_thread = agent_panel
- .read(cx)
- .active_conversation()
- .and_then(|cv| cv.read(cx).parent_id(cx));
+ if self.is_active_workspace(workspace, cx) {
+ self.agent_panel_visible = AgentPanel::is_visible(workspace, cx);
+ }
self.observe_draft_editor(cx);
}
}
@@ -448,50 +428,52 @@ impl Sidebar {
cx.subscribe_in(
agent_panel,
window,
- |this, agent_panel, event: &AgentPanelEvent, _window, cx| {
- // Check whether the panel that emitted this event belongs to
- // the currently active workspace. Only the active workspace's
- // panel should drive focused_thread — otherwise running threads
- // in background workspaces would continuously overwrite it,
- // causing the selection highlight to jump around.
- let is_active_panel = this
- .multi_workspace
- .upgrade()
- .and_then(|mw| mw.read(cx).workspace().read(cx).panel::<AgentPanel>(cx))
- .is_some_and(|active_panel| active_panel == *agent_panel);
-
- match event {
- AgentPanelEvent::ActiveViewChanged => {
- if is_active_panel {
- this.focused_thread = agent_panel
- .read(cx)
- .active_conversation()
- .and_then(|cv| cv.read(cx).parent_id(cx));
- this.observe_draft_editor(cx);
- }
- this.update_entries(false, cx);
- }
- AgentPanelEvent::ThreadFocused => {
- if is_active_panel {
- let new_focused = agent_panel
- .read(cx)
- .active_conversation()
- .and_then(|cv| cv.read(cx).parent_id(cx));
- if new_focused.is_some() && new_focused != this.focused_thread {
- this.focused_thread = new_focused;
- this.update_entries(false, cx);
- }
- }
- }
- AgentPanelEvent::BackgroundThreadChanged => {
- this.update_entries(false, cx);
+ |this, agent_panel, event: &AgentPanelEvent, _window, cx| match event {
+ AgentPanelEvent::ActiveViewChanged => {
+ let is_new_draft = agent_panel
+ .read(cx)
+ .active_conversation_view()
+ .is_some_and(|cv| cv.read(cx).parent_id(cx).is_none());
+ if is_new_draft {
+ this.focused_thread = None;
}
+ this.observe_draft_editor(cx);
+ this.update_entries(cx);
+ }
+ AgentPanelEvent::ThreadFocused | AgentPanelEvent::BackgroundThreadChanged => {
+ this.update_entries(cx);
}
},
)
.detach();
}
+ fn observe_docks(&mut self, workspace: &Entity<Workspace>, cx: &mut Context<Self>) {
+ let workspace = workspace.clone();
+ let docks: Vec<_> = workspace
+ .read(cx)
+ .all_docks()
+ .into_iter()
+ .cloned()
+ .collect();
+ for dock in docks {
+ let workspace = workspace.clone();
+ cx.observe(&dock, move |this, _dock, cx| {
+ if !this.is_active_workspace(&workspace, cx) {
+ return;
+ }
+
+ let is_visible = AgentPanel::is_visible(&workspace, cx);
+
+ if this.agent_panel_visible != is_visible {
+ this.agent_panel_visible = is_visible;
+ cx.notify();
+ }
+ })
+ .detach();
+ }
+ }
+
fn observe_draft_editor(&mut self, cx: &mut Context<Self>) {
self._draft_observation = self
.multi_workspace
@@ -501,7 +483,7 @@ impl Sidebar {
ws.read(cx).panel::<AgentPanel>(cx)
})
.and_then(|panel| {
- let cv = panel.read(cx).active_conversation()?;
+ let cv = panel.read(cx).active_conversation_view()?;
let tv = cv.read(cx).active_thread()?;
Some(tv.read(cx).message_editor.clone())
})
@@ -516,7 +498,7 @@ impl Sidebar {
let mw = self.multi_workspace.upgrade()?;
let workspace = mw.read(cx).workspace();
let panel = workspace.read(cx).panel::<AgentPanel>(cx)?;
- let conversation_view = panel.read(cx).active_conversation()?;
+ let conversation_view = panel.read(cx).active_conversation_view()?;
let thread_view = conversation_view.read(cx).active_thread()?;
let raw = thread_view.read(cx).message_editor.read(cx).text(cx);
let cleaned = Self::clean_mention_links(&raw);
@@ -607,7 +589,9 @@ impl Sidebar {
.collect()
}
- fn rebuild_contents(&mut self, thread_entries: Vec<ThreadMetadata>, cx: &App) {
+    /// When modifying this method, aim for a single forward pass over workspaces
+    /// and threads plus an O(T log T) sort. Avoid adding extra scans over the data.
+ fn rebuild_contents(&mut self, cx: &App) {
let Some(multi_workspace) = self.multi_workspace.upgrade() else {
return;
};
@@ -615,14 +599,6 @@ impl Sidebar {
let workspaces = mw.workspaces().to_vec();
let active_workspace = mw.workspaces().get(mw.active_workspace_index()).cloned();
- let mut threads_by_paths: HashMap<PathList, Vec<ThreadMetadata>> = HashMap::new();
- for row in thread_entries {
- threads_by_paths
- .entry(row.folder_paths.clone())
- .or_default()
- .push(row);
- }
-
// Build a lookup for agent icons from the first workspace's AgentServerStore.
let agent_server_store = workspaces
.first()
@@ -630,20 +606,37 @@ impl Sidebar {
let query = self.filter_editor.read(cx).text(cx);
- let previous = mem::take(&mut self.contents);
+ // Re-derive agent_panel_visible from the active workspace so it stays
+ // correct after workspace switches.
+ self.agent_panel_visible = active_workspace
+ .as_ref()
+ .map_or(false, |ws| AgentPanel::is_visible(ws, cx));
- // Collect the session IDs that were visible before this rebuild so we
- // can distinguish a thread that was deleted/removed (was in the list,
- // now gone) from a brand-new thread that hasn't been saved to the
- // metadata store yet (never was in the list).
- let previous_session_ids: HashSet<acp::SessionId> = previous
- .entries
- .iter()
- .filter_map(|entry| match entry {
- ListEntry::Thread(t) => Some(t.session_info.session_id.clone()),
- _ => None,
- })
- .collect();
+ // Derive active_thread_is_draft BEFORE focused_thread so we can
+ // use it as a guard below.
+ self.active_thread_is_draft = active_workspace
+ .as_ref()
+ .and_then(|ws| ws.read(cx).panel::<AgentPanel>(cx))
+ .map_or(false, |panel| panel.read(cx).active_thread_is_draft(cx));
+
+ // Derive focused_thread from the active workspace's agent panel.
+ // Only update when the panel gives us a positive signal — if the
+ // panel returns None (e.g. still loading after a thread activation),
+ // keep the previous value so eager writes from user actions survive.
+ let panel_focused = active_workspace
+ .as_ref()
+ .and_then(|ws| ws.read(cx).panel::<AgentPanel>(cx))
+ .and_then(|panel| {
+ panel
+ .read(cx)
+ .active_conversation_view()
+ .and_then(|cv| cv.read(cx).parent_id(cx))
+ });
+ if panel_focused.is_some() && !self.active_thread_is_draft {
+ self.focused_thread = panel_focused;
+ }
+
+ let previous = mem::take(&mut self.contents);
let old_statuses: HashMap<acp::SessionId, AgentThreadStatus> = previous
.entries
@@ -658,11 +651,8 @@ impl Sidebar {
let mut entries = Vec::new();
let mut notified_threads = previous.notified_threads;
- // Track all session IDs we add to entries so we can prune stale
- // notifications without a separate pass at the end.
let mut current_session_ids: HashSet<acp::SessionId> = HashSet::new();
- // Compute active_entry_index inline during the build pass.
- let mut active_entry_index: Option<usize> = None;
+ let mut project_header_indices: Vec<usize> = Vec::new();
// Identify absorbed workspaces in a single pass. A workspace is
// "absorbed" when it points at a git worktree checkout whose main
@@ -709,71 +699,80 @@ impl Sidebar {
}
}
+ let has_open_projects = workspaces
+ .iter()
+ .any(|ws| !workspace_path_list(ws, cx).paths().is_empty());
+
+ let active_ws_index = active_workspace
+ .as_ref()
+ .and_then(|active| workspaces.iter().position(|ws| ws == active));
+
for (ws_index, workspace) in workspaces.iter().enumerate() {
if absorbed.contains_key(&ws_index) {
continue;
}
let path_list = workspace_path_list(workspace, cx);
+ if path_list.paths().is_empty() {
+ continue;
+ }
+
let label = workspace_label_from_path_list(&path_list);
let is_collapsed = self.collapsed_groups.contains(&path_list);
let should_load_threads = !is_collapsed || !query.is_empty();
- let live_infos = Self::all_thread_infos_for_workspace(workspace, cx);
- let has_running_threads = live_infos
- .iter()
- .any(|info| info.status == AgentThreadStatus::Running);
- let waiting_thread_count = live_infos
- .iter()
- .filter(|info| info.status == AgentThreadStatus::WaitingForConfirmation)
- .count();
+ let mut live_infos = Self::all_thread_infos_for_workspace(workspace, cx);
let mut threads: Vec<ThreadEntry> = Vec::new();
+ let mut has_running_threads = false;
+ let mut waiting_thread_count: usize = 0;
if should_load_threads {
let mut seen_session_ids: HashSet<acp::SessionId> = HashSet::new();
- // Read threads from SidebarDb for this workspace's path list.
- if let Some(rows) = threads_by_paths.get(&path_list) {
- for row in rows {
- seen_session_ids.insert(row.session_id.clone());
- let (agent, icon, icon_from_external_svg) = match &row.agent_id {
- None => (Agent::NativeAgent, IconName::ZedAgent, None),
- Some(id) => {
- let custom_icon = agent_server_store
- .as_ref()
- .and_then(|store| store.read(cx).agent_icon(&id));
- (
- Agent::Custom { id: id.clone() },
- IconName::Terminal,
- custom_icon,
- )
- }
- };
- threads.push(ThreadEntry {
- agent,
- session_info: acp_thread::AgentSessionInfo {
- session_id: row.session_id.clone(),
- work_dirs: None,
- title: Some(row.title.clone()),
- updated_at: Some(row.updated_at),
- created_at: row.created_at,
- meta: None,
- },
- icon,
- icon_from_external_svg,
- status: AgentThreadStatus::default(),
- workspace: ThreadEntryWorkspace::Open(workspace.clone()),
- is_live: false,
- is_background: false,
- is_title_generating: false,
- highlight_positions: Vec::new(),
- worktree_name: None,
- worktree_highlight_positions: Vec::new(),
- diff_stats: DiffStats::default(),
- });
- }
+ // Read threads from the store cache for this workspace's path list.
+ let thread_store = SidebarThreadMetadataStore::global(cx);
+ let workspace_rows: Vec<_> =
+ thread_store.read(cx).entries_for_path(&path_list).collect();
+ for row in workspace_rows {
+ seen_session_ids.insert(row.session_id.clone());
+ let (agent, icon, icon_from_external_svg) = match &row.agent_id {
+ None => (Agent::NativeAgent, IconName::ZedAgent, None),
+ Some(id) => {
+ let custom_icon = agent_server_store
+ .as_ref()
+ .and_then(|store| store.read(cx).agent_icon(&id));
+ (
+ Agent::Custom { id: id.clone() },
+ IconName::Terminal,
+ custom_icon,
+ )
+ }
+ };
+ threads.push(ThreadEntry {
+ agent,
+ session_info: acp_thread::AgentSessionInfo {
+ session_id: row.session_id.clone(),
+ work_dirs: None,
+ title: Some(row.title.clone()),
+ updated_at: Some(row.updated_at),
+ created_at: row.created_at,
+ meta: None,
+ },
+ icon,
+ icon_from_external_svg,
+ status: AgentThreadStatus::default(),
+ workspace: ThreadEntryWorkspace::Open(workspace.clone()),
+ is_live: false,
+ is_background: false,
+ is_title_generating: false,
+ highlight_positions: Vec::new(),
+ worktree_name: None,
+ worktree_full_path: None,
+ worktree_highlight_positions: Vec::new(),
+ diff_stats: DiffStats::default(),
+ });
}
// Load threads from linked git worktrees of this workspace's repos.
@@ -784,16 +783,16 @@ impl Sidebar {
if snapshot.work_directory_abs_path != snapshot.original_repo_abs_path {
continue;
}
+
+ let main_worktree_path = snapshot.original_repo_abs_path.clone();
+
for git_worktree in snapshot.linked_worktrees() {
- let name = git_worktree
- .path
- .file_name()
- .unwrap_or_default()
- .to_string_lossy()
- .to_string();
+ let worktree_name =
+ linked_worktree_short_name(&main_worktree_path, &git_worktree.path)
+ .unwrap_or_default();
linked_worktree_queries.push((
PathList::new(std::slice::from_ref(&git_worktree.path)),
- name.into(),
+ worktree_name,
Arc::from(git_worktree.path.as_path()),
));
}
@@ -804,73 +803,89 @@ impl Sidebar {
{
let target_workspace =
match absorbed_workspace_by_path.get(worktree_path.as_ref()) {
- Some(&idx) => ThreadEntryWorkspace::Open(workspaces[idx].clone()),
+ Some(&idx) => {
+ live_infos.extend(Self::all_thread_infos_for_workspace(
+ &workspaces[idx],
+ cx,
+ ));
+ ThreadEntryWorkspace::Open(workspaces[idx].clone())
+ }
None => ThreadEntryWorkspace::Closed(worktree_path_list.clone()),
};
- if let Some(rows) = threads_by_paths.get(worktree_path_list) {
- for row in rows {
- if !seen_session_ids.insert(row.session_id.clone()) {
- continue;
- }
- let (agent, icon, icon_from_external_svg) = match &row.agent_id {
- None => (Agent::NativeAgent, IconName::ZedAgent, None),
- Some(name) => {
- let custom_icon =
- agent_server_store.as_ref().and_then(|store| {
- store
- .read(cx)
- .agent_icon(&AgentId(name.clone().into()))
- });
- (
- Agent::Custom {
- id: AgentId::new(name.clone()),
- },
- IconName::Terminal,
- custom_icon,
- )
- }
- };
- threads.push(ThreadEntry {
- agent,
- session_info: acp_thread::AgentSessionInfo {
- session_id: row.session_id.clone(),
- work_dirs: None,
- title: Some(row.title.clone()),
- updated_at: Some(row.updated_at),
- created_at: row.created_at,
- meta: None,
- },
- icon,
- icon_from_external_svg,
- status: AgentThreadStatus::default(),
- workspace: target_workspace.clone(),
- is_live: false,
- is_background: false,
- is_title_generating: false,
- highlight_positions: Vec::new(),
- worktree_name: Some(worktree_name.clone()),
- worktree_highlight_positions: Vec::new(),
- diff_stats: DiffStats::default(),
- });
+ let worktree_rows: Vec<_> = thread_store
+ .read(cx)
+ .entries_for_path(worktree_path_list)
+ .collect();
+ for row in worktree_rows {
+ if !seen_session_ids.insert(row.session_id.clone()) {
+ continue;
}
+ let (agent, icon, icon_from_external_svg) = match &row.agent_id {
+ None => (Agent::NativeAgent, IconName::ZedAgent, None),
+ Some(name) => {
+ let custom_icon =
+ agent_server_store.as_ref().and_then(|store| {
+ store.read(cx).agent_icon(&AgentId(name.clone().into()))
+ });
+ (
+ Agent::Custom {
+ id: AgentId::new(name.clone()),
+ },
+ IconName::Terminal,
+ custom_icon,
+ )
+ }
+ };
+ threads.push(ThreadEntry {
+ agent,
+ session_info: acp_thread::AgentSessionInfo {
+ session_id: row.session_id.clone(),
+ work_dirs: None,
+ title: Some(row.title.clone()),
+ updated_at: Some(row.updated_at),
+ created_at: row.created_at,
+ meta: None,
+ },
+ icon,
+ icon_from_external_svg,
+ status: AgentThreadStatus::default(),
+ workspace: target_workspace.clone(),
+ is_live: false,
+ is_background: false,
+ is_title_generating: false,
+ highlight_positions: Vec::new(),
+ worktree_name: Some(worktree_name.clone()),
+ worktree_full_path: Some(
+ worktree_path.display().to_string().into(),
+ ),
+ worktree_highlight_positions: Vec::new(),
+ diff_stats: DiffStats::default(),
+ });
}
}
}
- if !live_infos.is_empty() {
- let thread_index_by_session: HashMap<acp::SessionId, usize> = threads
- .iter()
- .enumerate()
- .map(|(i, t)| (t.session_info.session_id.clone(), i))
- .collect();
+ // Build a lookup from live_infos and compute running/waiting
+ // counts in a single pass.
+ let mut live_info_by_session: HashMap<&acp::SessionId, &ActiveThreadInfo> =
+ HashMap::new();
+ for info in &live_infos {
+ live_info_by_session.insert(&info.session_id, info);
+ if info.status == AgentThreadStatus::Running {
+ has_running_threads = true;
+ }
+ if info.status == AgentThreadStatus::WaitingForConfirmation {
+ waiting_thread_count += 1;
+ }
+ }
- for info in &live_infos {
- let Some(&idx) = thread_index_by_session.get(&info.session_id) else {
- continue;
- };
+ // Merge live info into threads and update notification state
+ // in a single pass.
+ for thread in &mut threads {
+ let session_id = &thread.session_info.session_id;
- let thread = &mut threads[idx];
+ if let Some(info) = live_info_by_session.get(session_id) {
thread.session_info.title = Some(info.title.clone());
thread.status = info.status;
thread.icon = info.icon;
@@ -880,36 +895,40 @@ impl Sidebar {
thread.is_title_generating = info.is_title_generating;
thread.diff_stats = info.diff_stats;
}
- }
- // Update notification state for live threads in the same pass.
- let is_active_workspace = active_workspace
- .as_ref()
- .is_some_and(|active| active == workspace);
+ let is_thread_workspace_active = match &thread.workspace {
+ ThreadEntryWorkspace::Open(thread_workspace) => active_workspace
+ .as_ref()
+ .is_some_and(|active| active == thread_workspace),
+ ThreadEntryWorkspace::Closed(_) => false,
+ };
- for thread in &threads {
- let session_id = &thread.session_info.session_id;
- if thread.is_background && thread.status == AgentThreadStatus::Completed {
- notified_threads.insert(session_id.clone());
- } else if thread.status == AgentThreadStatus::Completed
- && !is_active_workspace
+ if thread.status == AgentThreadStatus::Completed
+ && !is_thread_workspace_active
&& old_statuses.get(session_id) == Some(&AgentThreadStatus::Running)
{
notified_threads.insert(session_id.clone());
}
- if is_active_workspace && !thread.is_background {
+ if is_thread_workspace_active && !thread.is_background {
notified_threads.remove(session_id);
}
}
- // Sort by created_at (newest first), falling back to updated_at
- // for threads without a created_at (e.g., ACP sessions).
threads.sort_by(|a, b| {
let a_time = a.session_info.created_at.or(a.session_info.updated_at);
let b_time = b.session_info.created_at.or(b.session_info.updated_at);
b_time.cmp(&a_time)
});
+ } else {
+ for info in &live_infos {
+ if info.status == AgentThreadStatus::Running {
+ has_running_threads = true;
+ }
+ if info.status == AgentThreadStatus::WaitingForConfirmation {
+ waiting_thread_count += 1;
+ }
+ }
}
if !query.is_empty() {
@@ -946,6 +965,7 @@ impl Sidebar {
continue;
}
+ project_header_indices.push(entries.len());
entries.push(ListEntry::ProjectHeader {
path_list: path_list.clone(),
label,
@@ -955,20 +975,25 @@ impl Sidebar {
waiting_thread_count,
});
- // Track session IDs and compute active_entry_index as we add
- // thread entries.
for thread in matched_threads {
current_session_ids.insert(thread.session_info.session_id.clone());
- if active_entry_index.is_none() {
- if let Some(focused) = &self.focused_thread {
- if &thread.session_info.session_id == focused {
- active_entry_index = Some(entries.len());
- }
- }
- }
entries.push(thread.into());
}
} else {
+ let thread_count = threads.len();
+ let is_draft_for_workspace = self.agent_panel_visible
+ && self.active_thread_is_draft
+ && self.focused_thread.is_none()
+ && active_ws_index.is_some_and(|active_idx| {
+ active_idx == ws_index
+ || absorbed
+ .get(&active_idx)
+ .is_some_and(|(main_idx, _)| *main_idx == ws_index)
+ });
+
+ let show_new_thread_entry = thread_count == 0 || is_draft_for_workspace;
+
+ project_header_indices.push(entries.len());
entries.push(ListEntry::ProjectHeader {
path_list: path_list.clone(),
label,
@@ -982,10 +1007,13 @@ impl Sidebar {
continue;
}
- entries.push(ListEntry::NewThread {
- path_list: path_list.clone(),
- workspace: workspace.clone(),
- });
+ if show_new_thread_entry {
+ entries.push(ListEntry::NewThread {
+ path_list: path_list.clone(),
+ workspace: workspace.clone(),
+ is_active_draft: is_draft_for_workspace,
+ });
+ }
let total = threads.len();
@@ -993,26 +1021,43 @@ impl Sidebar {
let threads_to_show =
DEFAULT_THREADS_SHOWN + (extra_batches * DEFAULT_THREADS_SHOWN);
let count = threads_to_show.min(total);
- let is_fully_expanded = count >= total;
- // Track session IDs and compute active_entry_index as we add
- // thread entries.
- for thread in threads.into_iter().take(count) {
- current_session_ids.insert(thread.session_info.session_id.clone());
- if active_entry_index.is_none() {
- if let Some(focused) = &self.focused_thread {
- if &thread.session_info.session_id == focused {
- active_entry_index = Some(entries.len());
- }
+ let mut promoted_threads: HashSet<acp::SessionId> = HashSet::new();
+
+                        // Build visible entries in a single pass. Threads within
+                        // the cutoff are always shown. Threads beyond it are shown
+                        // only if they should be promoted (running, waiting, or
+                        // focused).
+ for (index, thread) in threads.into_iter().enumerate() {
+ let is_hidden = index >= count;
+
+ let session_id = &thread.session_info.session_id;
+ if is_hidden {
+ let is_promoted = thread.status == AgentThreadStatus::Running
+ || thread.status == AgentThreadStatus::WaitingForConfirmation
+ || notified_threads.contains(session_id)
+ || self
+ .focused_thread
+ .as_ref()
+ .is_some_and(|id| id == session_id);
+ if is_promoted {
+ promoted_threads.insert(session_id.clone());
+ }
+ if !promoted_threads.contains(session_id) {
+ continue;
}
}
+
+ current_session_ids.insert(session_id.clone());
entries.push(thread.into());
}
+ let visible = count + promoted_threads.len();
+ let is_fully_expanded = visible >= total;
+
if total > DEFAULT_THREADS_SHOWN {
entries.push(ListEntry::ViewMore {
path_list: path_list.clone(),
- remaining_count: total.saturating_sub(count),
is_fully_expanded,
});
}
@@ -3,8 +3,5 @@ use gpui::{Menu, MenuItem};
pub fn app_menus() -> Vec<Menu> {
use crate::actions::Quit;
- vec![Menu {
- name: "Storybook".into(),
- items: vec![MenuItem::action("Quit", Quit)],
- }]
+ vec![Menu::new("Storybook").items([MenuItem::action("Quit", Quit)])]
}
@@ -14,7 +14,7 @@ path = "src/sum_tree.rs"
doctest = false
[dependencies]
-arrayvec = "0.7.1"
+heapless.workspace = true
rayon.workspace = true
log.workspace = true
ztracing.workspace = true
@@ -1,5 +1,5 @@
use super::*;
-use arrayvec::ArrayVec;
+use heapless::Vec as ArrayVec;
use std::{cmp::Ordering, mem, sync::Arc};
use ztracing::instrument;
@@ -29,7 +29,7 @@ impl<T: Item + fmt::Debug, D: fmt::Debug> fmt::Debug for StackEntry<'_, T, D> {
#[derive(Clone)]
pub struct Cursor<'a, 'b, T: Item, D> {
tree: &'a SumTree<T>,
- stack: ArrayVec<StackEntry<'a, T, D>, 16>,
+ stack: ArrayVec<StackEntry<'a, T, D>, 16, u8>,
pub position: D,
did_seek: bool,
at_end: bool,
@@ -53,7 +53,7 @@ where
pub struct Iter<'a, T: Item> {
tree: &'a SumTree<T>,
- stack: ArrayVec<StackEntry<'a, T, ()>, 16>,
+ stack: ArrayVec<StackEntry<'a, T, ()>, 16, u8>,
}
impl<'a, 'b, T, D> Cursor<'a, 'b, T, D>
@@ -231,11 +231,13 @@ where
self.position = D::zero(self.cx);
self.at_end = self.tree.is_empty();
if !self.tree.is_empty() {
- self.stack.push(StackEntry {
- tree: self.tree,
- index: self.tree.0.child_summaries().len() as u32,
- position: D::from_summary(self.tree.summary(), self.cx),
- });
+ self.stack
+ .push(StackEntry {
+ tree: self.tree,
+ index: self.tree.0.child_summaries().len() as u32,
+ position: D::from_summary(self.tree.summary(), self.cx),
+ })
+ .unwrap_oob();
}
}
@@ -267,11 +269,13 @@ where
Node::Internal { child_trees, .. } => {
if descending {
let tree = &child_trees[entry.index()];
- self.stack.push(StackEntry {
- position: D::zero(self.cx),
- tree,
- index: tree.0.child_summaries().len() as u32 - 1,
- })
+ self.stack
+ .push(StackEntry {
+ position: D::zero(self.cx),
+ tree,
+ index: tree.0.child_summaries().len() as u32 - 1,
+ })
+ .unwrap_oob();
}
}
Node::Leaf { .. } => {
@@ -297,11 +301,13 @@ where
if self.stack.is_empty() {
if !self.at_end {
- self.stack.push(StackEntry {
- tree: self.tree,
- index: 0,
- position: D::zero(self.cx),
- });
+ self.stack
+ .push(StackEntry {
+ tree: self.tree,
+ index: 0,
+ position: D::zero(self.cx),
+ })
+ .unwrap_oob();
descend = true;
}
self.did_seek = true;
@@ -361,11 +367,13 @@ where
if let Some(subtree) = new_subtree {
descend = true;
- self.stack.push(StackEntry {
- tree: subtree,
- index: 0,
- position: self.position.clone(),
- });
+ self.stack
+ .push(StackEntry {
+ tree: subtree,
+ index: 0,
+ position: self.position.clone(),
+ })
+ .unwrap_oob();
} else {
descend = false;
self.stack.pop();
@@ -467,11 +475,13 @@ where
if !self.did_seek {
self.did_seek = true;
- self.stack.push(StackEntry {
- tree: self.tree,
- index: 0,
- position: D::zero(self.cx),
- });
+ self.stack
+ .push(StackEntry {
+ tree: self.tree,
+ index: 0,
+ position: D::zero(self.cx),
+ })
+ .unwrap_oob();
}
let mut ascending = false;
@@ -503,11 +513,13 @@ where
entry.index += 1;
entry.position = self.position.clone();
} else {
- self.stack.push(StackEntry {
- tree: child_tree,
- index: 0,
- position: self.position.clone(),
- });
+ self.stack
+ .push(StackEntry {
+ tree: child_tree,
+ index: 0,
+ position: self.position.clone(),
+ })
+ .unwrap_oob();
ascending = false;
continue 'outer;
}
@@ -578,11 +590,13 @@ impl<'a, T: Item> Iterator for Iter<'a, T> {
let mut descend = false;
if self.stack.is_empty() {
- self.stack.push(StackEntry {
- tree: self.tree,
- index: 0,
- position: (),
- });
+ self.stack
+ .push(StackEntry {
+ tree: self.tree,
+ index: 0,
+ position: (),
+ })
+ .unwrap_oob();
descend = true;
}
@@ -611,11 +625,13 @@ impl<'a, T: Item> Iterator for Iter<'a, T> {
if let Some(subtree) = new_subtree {
descend = true;
- self.stack.push(StackEntry {
- tree: subtree,
- index: 0,
- position: (),
- });
+ self.stack
+ .push(StackEntry {
+ tree: subtree,
+ index: 0,
+ position: (),
+ })
+ .unwrap_oob();
} else {
descend = false;
self.stack.pop();
@@ -748,8 +764,8 @@ trait SeekAggregate<'a, T: Item> {
struct SliceSeekAggregate<T: Item> {
tree: SumTree<T>,
- leaf_items: ArrayVec<T, { 2 * TREE_BASE }>,
- leaf_item_summaries: ArrayVec<T::Summary, { 2 * TREE_BASE }>,
+ leaf_items: ArrayVec<T, { 2 * TREE_BASE }, u8>,
+ leaf_item_summaries: ArrayVec<T::Summary, { 2 * TREE_BASE }, u8>,
leaf_summary: T::Summary,
}
@@ -786,8 +802,8 @@ impl<T: Item> SeekAggregate<'_, T> for SliceSeekAggregate<T> {
summary: &T::Summary,
cx: <T::Summary as Summary>::Context<'_>,
) {
- self.leaf_items.push(item.clone());
- self.leaf_item_summaries.push(summary.clone());
+ self.leaf_items.push(item.clone()).unwrap_oob();
+ self.leaf_item_summaries.push(summary.clone()).unwrap_oob();
Summary::add_summary(&mut self.leaf_summary, summary, cx);
}
fn push_tree(
@@ -3,8 +3,8 @@ mod cursor;
pub mod property_test;
mod tree_map;
-use arrayvec::ArrayVec;
pub use cursor::{Cursor, FilterCursor, Iter};
+use heapless::Vec as ArrayVec;
use rayon::iter::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator as _};
use std::marker::PhantomData;
use std::mem;
@@ -17,6 +17,17 @@ pub const TREE_BASE: usize = 2;
#[cfg(not(test))]
pub const TREE_BASE: usize = 6;
+// Helper for when we cannot use ArrayVec::<T>::push().unwrap() as T doesn't impl Debug
+trait CapacityResultExt {
+ fn unwrap_oob(self);
+}
+
+impl<T> CapacityResultExt for Result<(), T> {
+ fn unwrap_oob(self) {
+ self.unwrap_or_else(|_| panic!("item should fit into fixed size ArrayVec"))
+ }
+}
+
/// An item that can be stored in a [`SumTree`]
///
/// Must be summarized by a type that implements [`Summary`]
@@ -243,8 +254,9 @@ impl<T: Item> SumTree<T> {
let mut iter = iter.into_iter().fuse().peekable();
while iter.peek().is_some() {
- let items: ArrayVec<T, { 2 * TREE_BASE }> = iter.by_ref().take(2 * TREE_BASE).collect();
- let item_summaries: ArrayVec<T::Summary, { 2 * TREE_BASE }> =
+ let items: ArrayVec<T, { 2 * TREE_BASE }, u8> =
+ iter.by_ref().take(2 * TREE_BASE).collect();
+ let item_summaries: ArrayVec<T::Summary, { 2 * TREE_BASE }, u8> =
items.iter().map(|item| item.summary(cx)).collect();
let mut summary = item_summaries[0].clone();
@@ -284,8 +296,8 @@ impl<T: Item> SumTree<T> {
};
let child_summary = child_node.summary();
<T::Summary as Summary>::add_summary(summary, child_summary, cx);
- child_summaries.push(child_summary.clone());
- child_trees.push(child_node);
+ child_summaries.push(child_summary.clone()).unwrap_oob();
+ child_trees.push(child_node.clone()).unwrap_oob();
if child_trees.len() == 2 * TREE_BASE {
parent_nodes.extend(current_parent_node.take());
@@ -315,8 +327,8 @@ impl<T: Item> SumTree<T> {
.into_par_iter()
.chunks(2 * TREE_BASE)
.map(|items| {
- let items: ArrayVec<T, { 2 * TREE_BASE }> = items.into_iter().collect();
- let item_summaries: ArrayVec<T::Summary, { 2 * TREE_BASE }> =
+ let items: ArrayVec<T, { 2 * TREE_BASE }, u8> = items.into_iter().collect();
+ let item_summaries: ArrayVec<T::Summary, { 2 * TREE_BASE }, u8> =
items.iter().map(|item| item.summary(cx)).collect();
let mut summary = item_summaries[0].clone();
for item_summary in &item_summaries[1..] {
@@ -337,9 +349,9 @@ impl<T: Item> SumTree<T> {
.into_par_iter()
.chunks(2 * TREE_BASE)
.map(|child_nodes| {
- let child_trees: ArrayVec<SumTree<T>, { 2 * TREE_BASE }> =
+ let child_trees: ArrayVec<SumTree<T>, { 2 * TREE_BASE }, u8> =
child_nodes.into_iter().collect();
- let child_summaries: ArrayVec<T::Summary, { 2 * TREE_BASE }> = child_trees
+ let child_summaries: ArrayVec<T::Summary, { 2 * TREE_BASE }, u8> = child_trees
.iter()
.map(|child_tree| child_tree.summary().clone())
.collect();
@@ -798,14 +810,16 @@ impl<T: Item> SumTree<T> {
<T::Summary as Summary>::add_summary(summary, other_node.summary(), cx);
let height_delta = *height - other_node.height();
- let mut summaries_to_append = ArrayVec::<T::Summary, { 2 * TREE_BASE }>::new();
- let mut trees_to_append = ArrayVec::<SumTree<T>, { 2 * TREE_BASE }>::new();
+ let mut summaries_to_append = ArrayVec::<T::Summary, { 2 * TREE_BASE }, u8>::new();
+ let mut trees_to_append = ArrayVec::<SumTree<T>, { 2 * TREE_BASE }, u8>::new();
if height_delta == 0 {
summaries_to_append.extend(other_node.child_summaries().iter().cloned());
trees_to_append.extend(other_node.child_trees().iter().cloned());
} else if height_delta == 1 && !other_node.is_underflowing() {
- summaries_to_append.push(other_node.summary().clone());
- trees_to_append.push(other)
+ summaries_to_append
+ .push(other_node.summary().clone())
+ .unwrap_oob();
+ trees_to_append.push(other).unwrap_oob();
} else {
let tree_to_append = child_trees
.last_mut()
@@ -815,15 +829,17 @@ impl<T: Item> SumTree<T> {
child_trees.last().unwrap().0.summary().clone();
if let Some(split_tree) = tree_to_append {
- summaries_to_append.push(split_tree.0.summary().clone());
- trees_to_append.push(split_tree);
+ summaries_to_append
+ .push(split_tree.0.summary().clone())
+ .unwrap_oob();
+ trees_to_append.push(split_tree).unwrap_oob();
}
}
let child_count = child_trees.len() + trees_to_append.len();
if child_count > 2 * TREE_BASE {
- let left_summaries: ArrayVec<_, { 2 * TREE_BASE }>;
- let right_summaries: ArrayVec<_, { 2 * TREE_BASE }>;
+ let left_summaries: ArrayVec<_, { 2 * TREE_BASE }, u8>;
+ let right_summaries: ArrayVec<_, { 2 * TREE_BASE }, u8>;
let left_trees;
let right_trees;
@@ -868,7 +884,7 @@ impl<T: Item> SumTree<T> {
let left_items;
let right_items;
let left_summaries;
- let right_summaries: ArrayVec<T::Summary, { 2 * TREE_BASE }>;
+ let right_summaries: ArrayVec<T::Summary, { 2 * TREE_BASE }, u8>;
let midpoint = (child_count + child_count % 2) / 2;
{
@@ -933,8 +949,10 @@ impl<T: Item> SumTree<T> {
*child_summaries.first_mut().unwrap() = first.summary().clone();
if let Some(tree) = res {
if child_trees.len() < 2 * TREE_BASE {
- child_summaries.insert(0, tree.summary().clone());
- child_trees.insert(0, tree);
+ child_summaries
+ .insert(0, tree.summary().clone())
+ .unwrap_oob();
+ child_trees.insert(0, tree).unwrap_oob();
None
} else {
let new_child_summaries = {
@@ -1016,7 +1034,7 @@ impl<T: Item> SumTree<T> {
.iter()
.chain(child_summaries.iter())
.cloned();
- let left_summaries: ArrayVec<_, { 2 * TREE_BASE }> =
+ let left_summaries: ArrayVec<_, { 2 * TREE_BASE }, u8> =
all_summaries.by_ref().take(midpoint).collect();
*child_summaries = all_summaries.collect();
@@ -1065,7 +1083,7 @@ impl<T: Item> SumTree<T> {
.iter()
.chain(item_summaries.iter())
.cloned();
- let left_summaries: ArrayVec<_, { 2 * TREE_BASE }> =
+ let left_summaries: ArrayVec<_, { 2 * TREE_BASE }, u8> =
all_summaries.by_ref().take(midpoint).collect();
*item_summaries = all_summaries.collect();
@@ -1088,11 +1106,11 @@ impl<T: Item> SumTree<T> {
) -> Self {
let height = left.0.height() + 1;
let mut child_summaries = ArrayVec::new();
- child_summaries.push(left.0.summary().clone());
- child_summaries.push(right.0.summary().clone());
+ child_summaries.push(left.0.summary().clone()).unwrap_oob();
+ child_summaries.push(right.0.summary().clone()).unwrap_oob();
let mut child_trees = ArrayVec::new();
- child_trees.push(left);
- child_trees.push(right);
+ child_trees.push(left).unwrap_oob();
+ child_trees.push(right).unwrap_oob();
SumTree(Arc::new(Node::Internal {
height,
summary: sum(child_summaries.iter(), cx),
@@ -1252,13 +1270,13 @@ pub enum Node<T: Item> {
Internal {
height: u8,
summary: T::Summary,
- child_summaries: ArrayVec<T::Summary, { 2 * TREE_BASE }>,
- child_trees: ArrayVec<SumTree<T>, { 2 * TREE_BASE }>,
+ child_summaries: ArrayVec<T::Summary, { 2 * TREE_BASE }, u8>,
+ child_trees: ArrayVec<SumTree<T>, { 2 * TREE_BASE }, u8>,
},
Leaf {
summary: T::Summary,
- items: ArrayVec<T, { 2 * TREE_BASE }>,
- item_summaries: ArrayVec<T::Summary, { 2 * TREE_BASE }>,
+ items: ArrayVec<T, { 2 * TREE_BASE }, u8>,
+ item_summaries: ArrayVec<T::Summary, { 2 * TREE_BASE }, u8>,
},
}
@@ -1323,14 +1341,14 @@ impl<T: Item> Node<T> {
}
}
- fn child_trees(&self) -> &ArrayVec<SumTree<T>, { 2 * TREE_BASE }> {
+ fn child_trees(&self) -> &ArrayVec<SumTree<T>, { 2 * TREE_BASE }, u8> {
match self {
Node::Internal { child_trees, .. } => child_trees,
Node::Leaf { .. } => panic!("Leaf nodes have no child trees"),
}
}
- fn items(&self) -> &ArrayVec<T, { 2 * TREE_BASE }> {
+ fn items(&self) -> &ArrayVec<T, { 2 * TREE_BASE }, u8> {
match self {
Node::Leaf { items, .. } => items,
Node::Internal { .. } => panic!("Internal nodes have no items"),
@@ -23,7 +23,7 @@ pub use debug_format::{
Request, TcpArgumentsTemplate, ZedDebugConfig,
};
pub use task_template::{
- DebugArgsRequest, HideStrategy, RevealStrategy, TaskTemplate, TaskTemplates,
+ DebugArgsRequest, HideStrategy, RevealStrategy, SaveStrategy, TaskTemplate, TaskTemplates,
substitute_variables_in_map, substitute_variables_in_str,
};
pub use util::shell::{Shell, ShellKind};
@@ -75,6 +75,8 @@ pub struct SpawnInTerminal {
pub show_command: bool,
/// Whether to show the rerun button in the terminal tab.
pub show_rerun: bool,
+ /// Which edited buffers to save before running the task.
+ pub save: SaveStrategy,
}
impl SpawnInTerminal {
@@ -72,6 +72,9 @@ pub struct TaskTemplate {
/// Whether to show the command line in the task output.
#[serde(default = "default_true")]
pub show_command: bool,
+ /// Which edited buffers to save before running the task.
+ #[serde(default)]
+ pub save: SaveStrategy,
}
#[derive(Deserialize, Eq, PartialEq, Clone, Debug)]
@@ -109,6 +112,19 @@ pub enum HideStrategy {
OnSuccess,
}
+/// Which edited buffers to save before running a task.
+#[derive(Default, Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "snake_case")]
+pub enum SaveStrategy {
+ #[default]
+ /// Save all edited buffers.
+ All,
+ /// Save the current buffer.
+ Current,
+ /// Don't save any buffers.
+ None,
+}
+
/// A group of Tasks defined in a JSON file.
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
pub struct TaskTemplates(pub Vec<TaskTemplate>);
@@ -271,6 +287,7 @@ impl TaskTemplate {
show_summary: self.show_summary,
show_command: self.show_command,
show_rerun: true,
+ save: self.save,
},
})
}
@@ -1072,7 +1089,6 @@ mod tests {
command,
..TaskTemplate::default()
};
-
assert!(task.unknown_variables().is_empty());
}
}
@@ -425,7 +425,7 @@ impl Domain for TerminalDb {
];
}
-db::static_connection!(TERMINAL_DB, TerminalDb, [WorkspaceDb]);
+db::static_connection!(TerminalDb, [WorkspaceDb]);
impl TerminalDb {
query! {
@@ -8,7 +8,7 @@ use crate::{
};
use breadcrumbs::Breadcrumbs;
use collections::HashMap;
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
use futures::{channel::oneshot, future::join_all};
use gpui::{
Action, AnyView, App, AsyncApp, AsyncWindowContext, Context, Corner, Entity, EventEmitter,
@@ -250,16 +250,17 @@ impl TerminalPanel {
) -> Result<Entity<Self>> {
let mut terminal_panel = None;
- if let Some((database_id, serialization_key)) = workspace
- .read_with(&cx, |workspace, _| {
+ if let Some((database_id, serialization_key, kvp)) = workspace
+ .read_with(&cx, |workspace, cx| {
workspace
.database_id()
.zip(TerminalPanel::serialization_key(workspace))
+ .map(|(id, key)| (id, key, KeyValueStore::global(cx)))
})
.ok()
.flatten()
&& let Some(serialized_panel) = cx
- .background_spawn(async move { KEY_VALUE_STORE.read_kvp(&serialization_key) })
+ .background_spawn(async move { kvp.read_kvp(&serialization_key) })
.await
.log_err()
.flatten()
@@ -939,6 +940,7 @@ impl TerminalPanel {
else {
return;
};
+ let kvp = KeyValueStore::global(cx);
self.pending_serialization = cx.spawn(async move |terminal_panel, cx| {
cx.background_executor()
.timer(Duration::from_millis(50))
@@ -953,17 +955,16 @@ impl TerminalPanel {
});
cx.background_spawn(
async move {
- KEY_VALUE_STORE
- .write_kvp(
- serialization_key,
- serde_json::to_string(&SerializedTerminalPanel {
- items,
- active_item_id: None,
- height,
- width,
- })?,
- )
- .await?;
+ kvp.write_kvp(
+ serialization_key,
+ serde_json::to_string(&SerializedTerminalPanel {
+ items,
+ active_item_id: None,
+ height,
+ width,
+ })?,
+ )
+ .await?;
anyhow::Ok(())
}
.log_err(),
@@ -15,7 +15,7 @@ use gpui::{
};
use itertools::Itertools;
use menu;
-use persistence::TERMINAL_DB;
+use persistence::TerminalDb;
use project::{Project, ProjectEntryId, search::SearchQuery};
use schemars::JsonSchema;
use serde::Deserialize;
@@ -813,17 +813,16 @@ impl TerminalView {
return;
};
- if clipboard.entries().iter().any(|entry| match entry {
- ClipboardEntry::Image(image) => !image.bytes.is_empty(),
- _ => false,
- }) {
- self.forward_ctrl_v(cx);
- return;
- }
-
- if let Some(text) = clipboard.text() {
- self.terminal
- .update(cx, |terminal, _cx| terminal.paste(&text));
+ match clipboard.entries().first() {
+ Some(ClipboardEntry::Image(image)) if !image.bytes.is_empty() => {
+ self.forward_ctrl_v(cx);
+ }
+ _ => {
+ if let Some(text) = clipboard.text() {
+ self.terminal
+ .update(cx, |terminal, _cx| terminal.paste(&text));
+ }
+ }
}
}
@@ -1676,11 +1675,11 @@ impl Item for TerminalView {
log::debug!(
"Updating workspace id for the terminal, old: {old_id:?}, new: {new_id:?}",
);
- cx.background_spawn(TERMINAL_DB.update_workspace_id(
- new_id,
- old_id,
- cx.entity_id().as_u64(),
- ))
+ let db = TerminalDb::global(cx);
+ let entity_id = cx.entity_id().as_u64();
+ cx.background_spawn(async move {
+ db.update_workspace_id(new_id, old_id, entity_id).await
+ })
.detach();
}
self.workspace_id = workspace.database_id();
@@ -1703,7 +1702,8 @@ impl SerializableItem for TerminalView {
_window: &mut Window,
cx: &mut App,
) -> Task<anyhow::Result<()>> {
- delete_unloaded_items(alive_items, workspace_id, "terminals", &TERMINAL_DB, cx)
+ let db = TerminalDb::global(cx);
+ delete_unloaded_items(alive_items, workspace_id, "terminals", &db, cx)
}
fn serialize(
@@ -1728,14 +1728,13 @@ impl SerializableItem for TerminalView {
let custom_title = self.custom_title.clone();
self.needs_serialize = false;
+ let db = TerminalDb::global(cx);
Some(cx.background_spawn(async move {
if let Some(cwd) = cwd {
- TERMINAL_DB
- .save_working_directory(item_id, workspace_id, cwd)
+ db.save_working_directory(item_id, workspace_id, cwd)
.await?;
}
- TERMINAL_DB
- .save_custom_title(item_id, workspace_id, custom_title)
+ db.save_custom_title(item_id, workspace_id, custom_title)
.await?;
Ok(())
}))
@@ -1756,7 +1755,8 @@ impl SerializableItem for TerminalView {
window.spawn(cx, async move |cx| {
let (cwd, custom_title) = cx
.update(|_window, cx| {
- let from_db = TERMINAL_DB
+ let db = TerminalDb::global(cx);
+ let from_db = db
.get_working_directory(item_id, workspace_id)
.log_err()
.flatten();
@@ -1770,7 +1770,7 @@ impl SerializableItem for TerminalView {
.upgrade()
.and_then(|workspace| default_working_directory(workspace.read(cx), cx))
};
- let custom_title = TERMINAL_DB
+ let custom_title = db
.get_custom_title(item_id, workspace_id)
.log_err()
.flatten()
@@ -749,6 +749,48 @@ fn test_concurrent_edits() {
assert_eq!(buffer3.text(), "a12c34e56");
}
+// Regression test: applying a remote edit whose FullOffset range partially
+// overlaps a fragment that was already deleted (observed but not visible)
+// used to leave the fragment unsplit, causing the rope builder to read past
+// the end of the rope.
+#[test]
+fn test_edit_partially_intersecting_a_deleted_fragment() {
+ let mut buffer = Buffer::new(ReplicaId::new(1), BufferId::new(1).unwrap(), "abcdefgh");
+
+ // Delete "cde", creating a single deleted fragment at FullOffset 2..5.
+ // After this the fragment layout is:
+ // "ab"(vis, FullOffset 0..2) "cde"(del, 2..5) "fgh"(vis, 5..8)
+ buffer.edit([(2..5, "")]);
+ assert_eq!(buffer.text(), "abfgh");
+
+ // Construct a synthetic remote edit whose version includes the deletion (so
+ // the "cde" fragment is observed + deleted → !was_visible) but whose
+ // FullOffset range only partially overlaps it. This state arises in
+ // production when concurrent edits cause different fragment splits on
+ // different replicas.
+ let synthetic_timestamp = clock::Lamport {
+ replica_id: ReplicaId::new(2),
+ value: 10,
+ };
+ let synthetic_edit = Operation::Edit(EditOperation {
+ timestamp: synthetic_timestamp,
+ version: buffer.version(),
+ // Range 1..4 partially overlaps the deleted "cde" (FullOffset 2..5):
+ // it covers "b" (1..2) and only "cd" (2..4), leaving "e" (4..5) out.
+ ranges: vec![FullOffset(1)..FullOffset(4)],
+ new_text: vec!["".into()],
+ });
+
+ // Without the fix this panics with "cannot summarize past end of rope"
+ // because the full 3-byte "cde" fragment is consumed from the deleted
+ // rope instead of only the 2-byte intersection.
+ buffer.apply_ops([synthetic_edit]);
+ assert_eq!(buffer.text(), "afgh");
+
+ buffer.undo_operations([(synthetic_timestamp, u32::MAX)].into_iter().collect());
+ assert_eq!(buffer.text(), "abfgh");
+}
+
#[gpui::test(iterations = 100)]
fn test_random_concurrent_edits(mut rng: StdRng) {
let peers = env::var("PEERS")
@@ -1234,15 +1234,18 @@ impl Buffer {
let fragment_end = old_fragments.end().0.full_offset();
let mut intersection = fragment.clone();
let intersection_end = cmp::min(range.end, fragment_end);
- if fragment.was_visible(version, &self.undo_map) {
+ if version.observed(fragment.timestamp) {
intersection.len = (intersection_end.0 - fragment_start.0) as u32;
intersection.insertion_offset +=
(fragment_start - old_fragments.start().0.full_offset()) as u32;
intersection.id =
Locator::between(&new_fragments.summary().max_id, &intersection.id);
- intersection.deletions.push(timestamp);
- intersection.visible = false;
- insertion_slices.push(InsertionSlice::from_fragment(timestamp, &intersection));
+ if fragment.was_visible(version, &self.undo_map) {
+ intersection.deletions.push(timestamp);
+ intersection.visible = false;
+ insertion_slices
+ .push(InsertionSlice::from_fragment(timestamp, &intersection));
+ }
}
if intersection.len > 0 {
if fragment.visible && !intersection.visible {
@@ -38,13 +38,15 @@ chrono.workspace = true
client.workspace = true
cloud_api_types.workspace = true
db.workspace = true
-feature_flags.workspace = true
git_ui.workspace = true
gpui = { workspace = true, features = ["screen-capture"] }
+icons.workspace = true
+livekit_client.workspace = true
notifications.workspace = true
project.workspace = true
recent_projects.workspace = true
remote.workspace = true
+remote_connection.workspace = true
rpc.workspace = true
semver.workspace = true
schemars.workspace = true
@@ -114,8 +114,9 @@ impl ApplicationMenu {
name,
action,
checked,
+ disabled,
..
- } => menu.action_checked(name, action, checked),
+ } => menu.action_checked_with_disabled(name, action, checked, disabled),
OwnedMenuItem::Submenu(submenu) => {
submenu
.items
@@ -126,8 +127,10 @@ impl ApplicationMenu {
name,
action,
checked,
+ disabled,
..
- } => menu.action_checked(name, action, checked),
+ } => menu
+ .action_checked_with_disabled(name, action, checked, disabled),
OwnedMenuItem::Submenu(_) => menu,
OwnedMenuItem::SystemMenu(_) => {
// A system menu doesn't make sense in this context, so ignore it
@@ -9,7 +9,10 @@ use gpui::{
canvas, point,
};
use gpui::{App, Task, Window};
+use icons::IconName;
+use livekit_client::ConnectionQuality;
use project::WorktreeSettings;
+use remote_connection::RemoteConnectionModal;
use rpc::proto::{self};
use settings::{Settings as _, SettingsLocation};
use theme::ActiveTheme;
@@ -19,9 +22,17 @@ use ui::{
};
use util::rel_path::RelPath;
use workspace::{ParticipantLocation, notifications::DetachAndPromptErr};
+use zed_actions::ShowCallStats;
use crate::TitleBar;
+fn format_stat(value: Option<f64>, format: impl Fn(f64) -> String) -> String {
+ match value {
+ Some(v) => format(v),
+ None => "—".to_string(),
+ }
+}
+
pub fn toggle_screen_sharing(
screen: anyhow::Result<Option<Rc<dyn ScreenCaptureSource>>>,
window: &mut Window,
@@ -332,7 +343,11 @@ impl TitleBar {
let is_connecting_to_project = self
.workspace
- .update(cx, |workspace, cx| workspace.has_active_modal(window, cx))
+ .update(cx, |workspace, cx| {
+ workspace
+ .active_modal::<RemoteConnectionModal>(cx)
+ .is_some()
+ })
.unwrap_or(false);
let room = room.read(cx);
@@ -347,6 +362,11 @@ impl TitleBar {
let can_share_projects = room.can_share_projects();
let screen_sharing_supported = cx.is_screen_capture_supported();
+ let stats = room
+ .diagnostics()
+ .map(|d| d.read(cx).stats().clone())
+ .unwrap_or_default();
+
let channel_store = ChannelStore::global(cx);
let channel = room
.channel_id()
@@ -354,6 +374,45 @@ impl TitleBar {
let mut children = Vec::new();
+ let effective_quality = stats.effective_quality.unwrap_or(ConnectionQuality::Lost);
+ let (signal_icon, signal_color, quality_label) = match effective_quality {
+ ConnectionQuality::Excellent => {
+ (IconName::SignalHigh, Some(Color::Success), "Excellent")
+ }
+ ConnectionQuality::Good => (IconName::SignalHigh, None, "Good"),
+ ConnectionQuality::Poor => (IconName::SignalMedium, Some(Color::Warning), "Poor"),
+ ConnectionQuality::Lost => (IconName::SignalLow, Some(Color::Error), "Lost"),
+ };
+ let quality_label: SharedString = quality_label.into();
+ children.push(
+ IconButton::new("call-quality", signal_icon)
+ .style(ButtonStyle::Subtle)
+ .icon_size(IconSize::Small)
+ .when_some(signal_color, |button, color| button.icon_color(color))
+ .tooltip(move |_window, cx| {
+ let quality_label = quality_label.clone();
+ let latency = format_stat(stats.latency_ms, |v| format!("{:.0}ms", v));
+ let jitter = format_stat(stats.jitter_ms, |v| format!("{:.0}ms", v));
+ let packet_loss = format_stat(stats.packet_loss_pct, |v| format!("{:.1}%", v));
+ let input_lag =
+ format_stat(stats.input_lag.map(|d| d.as_secs_f64() * 1000.0), |v| {
+ format!("{:.1}ms", v)
+ });
+
+ Tooltip::with_meta(
+ format!("Connection: {quality_label}"),
+ Some(&ShowCallStats),
+ format!(
+ "Latency: {latency} · Jitter: {jitter} · Loss: {packet_loss} · Input lag: {input_lag}",
+ ),
+ cx,
+ )
+ })
+ .on_click(move |_, window, cx| {
+ window.dispatch_action(Box::new(ShowCallStats), cx);
+ })
+ .into_any_element(),
+ );
children.push(
h_flex()
.gap_1()
@@ -489,6 +548,11 @@ impl TitleBar {
);
if can_use_microphone && screen_sharing_supported {
+ #[cfg(target_os = "linux")]
+ let is_wayland = gpui::guess_compositor() == "Wayland";
+ #[cfg(not(target_os = "linux"))]
+ let is_wayland = false;
+
let trigger = IconButton::new("screen-share", IconName::Screen)
.style(ButtonStyle::Subtle)
.icon_size(IconSize::Small)
@@ -505,28 +569,56 @@ impl TitleBar {
.room()
.is_some_and(|room| !room.read(cx).is_sharing_screen());
- window
- .spawn(cx, async move |cx| {
- let screen = if should_share {
- cx.update(|_, cx| pick_default_screen(cx))?.await
- } else {
- Ok(None)
- };
- cx.update(|window, cx| toggle_screen_sharing(screen, window, cx))?;
+ #[cfg(target_os = "linux")]
+ {
+ if is_wayland
+ && let Some(room) = ActiveCall::global(cx).read(cx).room().cloned()
+ {
+ let task = room.update(cx, |room, cx| {
+ if should_share {
+ room.share_screen_wayland(cx)
+ } else {
+ room.unshare_screen(true, cx)
+ .map(|()| Task::ready(Ok(())))
+ .unwrap_or_else(|e| Task::ready(Err(e)))
+ }
+ });
+ task.detach_and_prompt_err(
+ "Sharing Screen Failed",
+ window,
+ cx,
+ |e, _, _| Some(format!("{e:?}")),
+ );
+ }
+ }
+ if !is_wayland {
+ window
+ .spawn(cx, async move |cx| {
+ let screen = if should_share {
+ cx.update(|_, cx| pick_default_screen(cx))?.await
+ } else {
+ Ok(None)
+ };
+ cx.update(|window, cx| toggle_screen_sharing(screen, window, cx))?;
- Result::<_, anyhow::Error>::Ok(())
- })
- .detach();
+ Result::<_, anyhow::Error>::Ok(())
+ })
+ .detach();
+ }
});
- children.push(
- SplitButton::new(
- trigger.render(window, cx),
- self.render_screen_list().into_any_element(),
- )
- .style(SplitButtonStyle::Transparent)
- .into_any_element(),
- );
+ if is_wayland {
+ children.push(trigger.into_any_element());
+ } else {
+ children.push(
+ SplitButton::new(
+ trigger.render(window, cx),
+ self.render_screen_list().into_any_element(),
+ )
+ .style(SplitButtonStyle::Transparent)
+ .into_any_element(),
+ );
+ }
}
children.push(div().pr_2().into_any_element());
@@ -44,7 +44,7 @@ impl OnboardingBanner {
subtitle: subtitle.or(Some(SharedString::from("Introducing:"))),
},
visible_when: None,
- dismissed: get_dismissed(source),
+ dismissed: get_dismissed(source, cx),
}
}
@@ -75,9 +75,9 @@ fn dismissed_at_key(source: &str) -> String {
}
}
-fn get_dismissed(source: &str) -> bool {
+fn get_dismissed(source: &str, cx: &App) -> bool {
let dismissed_at = dismissed_at_key(source);
- db::kvp::KEY_VALUE_STORE
+ db::kvp::KeyValueStore::global(cx)
.read_kvp(&dismissed_at)
.log_err()
.is_some_and(|dismissed| dismissed.is_some())
@@ -85,9 +85,10 @@ fn get_dismissed(source: &str) -> bool {
fn persist_dismissed(source: &str, cx: &mut App) {
let dismissed_at = dismissed_at_key(source);
- cx.spawn(async |_| {
+ let kvp = db::kvp::KeyValueStore::global(cx);
+ cx.spawn(async move |_| {
let time = chrono::Utc::now().to_rfc3339();
- db::kvp::KEY_VALUE_STORE.write_kvp(dismissed_at, time).await
+ kvp.write_kvp(dismissed_at, time).await
})
.detach_and_log_err(cx);
}
@@ -105,7 +106,8 @@ pub fn restore_banner(cx: &mut App) {
let source = &cx.global::<BannerGlobal>().entity.read(cx).source;
let dismissed_at = dismissed_at_key(source);
- cx.spawn(async |_| db::kvp::KEY_VALUE_STORE.delete_kvp(dismissed_at).await)
+ let kvp = db::kvp::KeyValueStore::global(cx);
+ cx.spawn(async move |_| kvp.delete_kvp(dismissed_at).await)
.detach_and_log_err(cx);
}
@@ -14,6 +14,7 @@ pub use platform_title_bar::{
self, DraggedWindowTab, MergeAllWindows, MoveTabToNewWindow, PlatformTitleBar,
ShowNextWindowTab, ShowPreviousWindowTab,
};
+use project::linked_worktree_short_name;
#[cfg(not(target_os = "macos"))]
use crate::application_menu::{
@@ -24,16 +25,14 @@ use auto_update::AutoUpdateStatus;
use call::ActiveCall;
use client::{Client, UserStore, zed_urls};
use cloud_api_types::Plan;
-use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt};
+
use gpui::{
Action, AnyElement, App, Context, Corner, Element, Empty, Entity, Focusable,
InteractiveElement, IntoElement, MouseButton, ParentElement, Render,
StatefulInteractiveElement, Styled, Subscription, WeakEntity, Window, actions, div,
};
use onboarding_banner::OnboardingBanner;
-use project::{
- DisableAiSettings, Project, git_store::GitStoreEvent, trusted_worktrees::TrustedWorktrees,
-};
+use project::{Project, git_store::GitStoreEvent, trusted_worktrees::TrustedWorktrees};
use remote::RemoteConnectionOptions;
use settings::Settings;
use settings::WorktreeId;
@@ -42,14 +41,13 @@ use std::sync::Arc;
use theme::ActiveTheme;
use title_bar_settings::TitleBarSettings;
use ui::{
- Avatar, ButtonLike, ContextMenu, Divider, IconWithIndicator, Indicator, PopoverMenu,
- PopoverMenuHandle, TintColor, Tooltip, prelude::*, utils::platform_title_bar_height,
+ Avatar, ButtonLike, ContextMenu, IconWithIndicator, Indicator, PopoverMenu, PopoverMenuHandle,
+ TintColor, Tooltip, prelude::*, utils::platform_title_bar_height,
};
use update_version::UpdateVersion;
use util::ResultExt;
use workspace::{
- MultiWorkspace, ToggleWorkspaceSidebar, ToggleWorktreeSecurity, Workspace, WorkspaceId,
- notifications::NotifyResultExt,
+ MultiWorkspace, ToggleWorktreeSecurity, Workspace, WorkspaceId, notifications::NotifyResultExt,
};
use zed_actions::OpenRemote;
@@ -158,6 +156,7 @@ pub struct TitleBar {
banner: Entity<OnboardingBanner>,
update_version: Entity<UpdateVersion>,
screen_share_popover_handle: PopoverMenuHandle<ContextMenu>,
+ _diagnostics_subscription: Option<gpui::Subscription>,
}
impl Render for TitleBar {
@@ -168,6 +167,26 @@ impl Render for TitleBar {
let mut children = Vec::new();
+ let mut project_name = None;
+ let mut repository = None;
+ let mut linked_worktree_name = None;
+ if let Some(worktree) = self.effective_active_worktree(cx) {
+ repository = self.get_repository_for_worktree(&worktree, cx);
+ let worktree = worktree.read(cx);
+ project_name = worktree
+ .root_name()
+ .file_name()
+ .map(|name| SharedString::from(name.to_string()));
+ linked_worktree_name = repository.as_ref().and_then(|repo| {
+ let repo = repo.read(cx);
+ linked_worktree_short_name(
+ repo.original_repo_abs_path.as_ref(),
+ repo.work_directory_abs_path.as_ref(),
+ )
+ .filter(|name| Some(name) != project_name.as_ref())
+ });
+ }
+
children.push(
h_flex()
.h_full()
@@ -176,7 +195,6 @@ impl Render for TitleBar {
let mut render_project_items = title_bar_settings.show_branch_name
|| title_bar_settings.show_project_items;
title_bar
- .children(self.render_workspace_sidebar_toggle(window, cx))
.when_some(
self.application_menu.clone().filter(|_| !show_menus),
|title_bar, menu| {
@@ -191,11 +209,18 @@ impl Render for TitleBar {
.when(title_bar_settings.show_project_items, |title_bar| {
title_bar
.children(self.render_project_host(cx))
- .child(self.render_project_name(window, cx))
- })
- .when(title_bar_settings.show_branch_name, |title_bar| {
- title_bar.children(self.render_project_branch(cx))
+ .child(self.render_project_name(project_name, window, cx))
})
+ .when_some(
+ repository.filter(|_| title_bar_settings.show_branch_name),
+ |title_bar, repository| {
+ title_bar.children(self.render_project_branch(
+ repository,
+ linked_worktree_name,
+ cx,
+ ))
+ },
+ )
})
})
.on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation())
@@ -373,19 +398,15 @@ impl TitleBar {
};
let is_open = multi_workspace.read(cx).sidebar_open();
- let has_notifications = multi_workspace.read(cx).sidebar_has_notifications(cx);
platform_titlebar.update(cx, |titlebar, cx| {
titlebar.set_workspace_sidebar_open(is_open, cx);
- titlebar.set_sidebar_has_notifications(has_notifications, cx);
});
let platform_titlebar = platform_titlebar.clone();
let subscription = cx.observe(&multi_workspace, move |mw, cx| {
let is_open = mw.read(cx).sidebar_open();
- let has_notifications = mw.read(cx).sidebar_has_notifications(cx);
platform_titlebar.update(cx, |titlebar, cx| {
titlebar.set_workspace_sidebar_open(is_open, cx);
- titlebar.set_sidebar_has_notifications(has_notifications, cx);
});
});
@@ -400,7 +421,7 @@ impl TitleBar {
.detach();
}
- Self {
+ let mut this = Self {
platform_titlebar,
application_menu,
workspace: workspace.weak_handle(),
@@ -412,7 +433,12 @@ impl TitleBar {
banner,
update_version,
screen_share_popover_handle: PopoverMenuHandle::default(),
- }
+ _diagnostics_subscription: None,
+ };
+
+ this.observe_diagnostics(cx);
+
+ this
}
fn worktree_count(&self, cx: &App) -> usize {
@@ -486,14 +512,15 @@ impl TitleBar {
let git_store = project.git_store().read(cx);
let worktree_path = worktree.read(cx).abs_path();
- for repo in git_store.repositories().values() {
- let repo_path = &repo.read(cx).work_directory_abs_path;
- if worktree_path == *repo_path || worktree_path.starts_with(repo_path.as_ref()) {
- return Some(repo.clone());
- }
- }
-
- None
+ git_store
+ .repositories()
+ .values()
+ .filter(|repo| {
+ let repo_path = &repo.read(cx).work_directory_abs_path;
+ worktree_path == *repo_path || worktree_path.starts_with(repo_path.as_ref())
+ })
+ .max_by_key(|repo| repo.read(cx).work_directory_abs_path.as_os_str().len())
+ .cloned()
}
fn render_remote_project_connection(&self, cx: &mut Context<Self>) -> Option<AnyElement> {
@@ -689,58 +716,14 @@ impl TitleBar {
)
}
- fn render_workspace_sidebar_toggle(
+ fn render_project_name(
&self,
- _window: &mut Window,
+ name: Option<SharedString>,
+ _: &mut Window,
cx: &mut Context<Self>,
- ) -> Option<AnyElement> {
- if !cx.has_flag::<AgentV2FeatureFlag>() || DisableAiSettings::get_global(cx).disable_ai {
- return None;
- }
-
- let is_sidebar_open = self.platform_titlebar.read(cx).is_workspace_sidebar_open();
-
- if is_sidebar_open {
- return None;
- }
-
- let has_notifications = self.platform_titlebar.read(cx).sidebar_has_notifications();
-
- Some(
- h_flex()
- .h_full()
- .gap_0p5()
- .child(
- IconButton::new(
- "toggle-workspace-sidebar",
- IconName::ThreadsSidebarLeftClosed,
- )
- .icon_size(IconSize::Small)
- .when(has_notifications, |button| {
- button
- .indicator(Indicator::dot().color(Color::Accent))
- .indicator_border_color(Some(cx.theme().colors().title_bar_background))
- })
- .tooltip(move |_, cx| {
- Tooltip::for_action("Open Threads Sidebar", &ToggleWorkspaceSidebar, cx)
- })
- .on_click(|_, window, cx| {
- window.dispatch_action(ToggleWorkspaceSidebar.boxed_clone(), cx);
- }),
- )
- .child(Divider::vertical().color(ui::DividerColor::Border))
- .into_any_element(),
- )
- }
-
- pub fn render_project_name(&self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ ) -> impl IntoElement {
let workspace = self.workspace.clone();
- let name = self.effective_active_worktree(cx).map(|worktree| {
- let worktree = worktree.read(cx);
- SharedString::from(worktree.root_name().as_unix_str().to_string())
- });
-
let is_project_selected = name.is_some();
let display_name = if let Some(ref name) = name {
@@ -751,9 +734,16 @@ impl TitleBar {
let is_sidebar_open = self.platform_titlebar.read(cx).is_workspace_sidebar_open();
- if is_sidebar_open {
+ let is_threads_list_view_active = self
+ .multi_workspace
+ .as_ref()
+ .and_then(|mw| mw.upgrade())
+ .map(|mw| mw.read(cx).is_threads_list_view_active(cx))
+ .unwrap_or(false);
+
+ if is_sidebar_open && is_threads_list_view_active {
return self
- .render_project_name_with_sidebar_popover(display_name, is_project_selected, cx)
+ .render_recent_projects_popover(display_name, is_project_selected, cx)
.into_any_element();
}
@@ -762,7 +752,7 @@ impl TitleBar {
.map(|w| w.read(cx).focus_handle(cx))
.unwrap_or_else(|| cx.focus_handle());
- let excluded_workspace_ids: HashSet<WorkspaceId> = self
+ let sibling_workspace_ids: HashSet<WorkspaceId> = self
.multi_workspace
.as_ref()
.and_then(|mw| mw.upgrade())
@@ -779,7 +769,7 @@ impl TitleBar {
.menu(move |window, cx| {
Some(recent_projects::RecentProjects::popover(
workspace.clone(),
- excluded_workspace_ids.clone(),
+ sibling_workspace_ids.clone(),
false,
focus_handle.clone(),
window,
@@ -812,60 +802,79 @@ impl TitleBar {
.into_any_element()
}
- /// When the sidebar is open, the title bar's project name button becomes a
- /// plain button that toggles the sidebar's popover (so the popover is always
- /// anchored to the sidebar). Both buttons show their selected state together.
- fn render_project_name_with_sidebar_popover(
+ fn render_recent_projects_popover(
&self,
display_name: String,
is_project_selected: bool,
cx: &mut Context<Self>,
) -> impl IntoElement {
- let multi_workspace = self.multi_workspace.clone();
+ let workspace = self.workspace.clone();
+
+ let focus_handle = workspace
+ .upgrade()
+ .map(|w| w.read(cx).focus_handle(cx))
+ .unwrap_or_else(|| cx.focus_handle());
- let is_popover_deployed = multi_workspace
+ let sibling_workspace_ids: HashSet<WorkspaceId> = self
+ .multi_workspace
.as_ref()
.and_then(|mw| mw.upgrade())
- .map(|mw| mw.read(cx).is_recent_projects_popover_deployed(cx))
- .unwrap_or(false);
-
- Button::new("project_name_trigger", display_name)
- .label_size(LabelSize::Small)
- .when(self.worktree_count(cx) > 1, |this| {
- this.end_icon(
- Icon::new(IconName::ChevronDown)
- .size(IconSize::XSmall)
- .color(Color::Muted),
- )
+ .map(|mw| {
+ mw.read(cx)
+ .workspaces()
+ .iter()
+ .filter_map(|ws| ws.read(cx).database_id())
+ .collect()
})
- .toggle_state(is_popover_deployed)
- .selected_style(ButtonStyle::Tinted(TintColor::Accent))
- .when(!is_project_selected, |s| s.color(Color::Muted))
- .tooltip(move |_window, cx| {
- Tooltip::for_action(
- "Recent Projects",
- &zed_actions::OpenRecent {
- create_new_window: false,
- },
+ .unwrap_or_default();
+
+ PopoverMenu::new("sidebar-title-recent-projects-menu")
+ .menu(move |window, cx| {
+ Some(recent_projects::RecentProjects::popover(
+ workspace.clone(),
+ sibling_workspace_ids.clone(),
+ false,
+ focus_handle.clone(),
+ window,
cx,
- )
- })
- .on_click(move |_, window, cx| {
- if let Some(mw) = multi_workspace.as_ref().and_then(|mw| mw.upgrade()) {
- mw.update(cx, |mw, cx| {
- mw.toggle_recent_projects_popover(window, cx);
- });
- }
+ ))
})
+ .trigger_with_tooltip(
+ Button::new("project_name_trigger", display_name)
+ .label_size(LabelSize::Small)
+ .when(self.worktree_count(cx) > 1, |this| {
+ this.end_icon(
+ Icon::new(IconName::ChevronDown)
+ .size(IconSize::XSmall)
+ .color(Color::Muted),
+ )
+ })
+ .selected_style(ButtonStyle::Tinted(TintColor::Accent))
+ .when(!is_project_selected, |s| s.color(Color::Muted)),
+ move |_window, cx| {
+ Tooltip::for_action(
+ "Recent Projects",
+ &zed_actions::OpenRecent {
+ create_new_window: false,
+ },
+ cx,
+ )
+ },
+ )
+ .anchor(gpui::Corner::TopLeft)
}
- pub fn render_project_branch(&self, cx: &mut Context<Self>) -> Option<impl IntoElement> {
- let effective_worktree = self.effective_active_worktree(cx)?;
- let repository = self.get_repository_for_worktree(&effective_worktree, cx)?;
+ fn render_project_branch(
+ &self,
+ repository: Entity<project::git_store::Repository>,
+ linked_worktree_name: Option<SharedString>,
+ cx: &mut Context<Self>,
+ ) -> Option<impl IntoElement> {
let workspace = self.workspace.upgrade()?;
let (branch_name, icon_info) = {
let repo = repository.read(cx);
+
let branch_name = repo
.branch
.as_ref()
@@ -898,8 +907,8 @@ impl TitleBar {
(branch_name, icon_info)
};
+ let branch_name = branch_name?;
let settings = TitleBarSettings::get_global(cx);
-
let effective_repository = Some(repository);
Some(
@@ -915,21 +924,42 @@ impl TitleBar {
))
})
.trigger_with_tooltip(
- Button::new("project_branch_trigger", branch_name?)
+ ButtonLike::new("project_branch_trigger")
.selected_style(ButtonStyle::Tinted(TintColor::Accent))
- .label_size(LabelSize::Small)
- .color(Color::Muted)
- .when(settings.show_branch_icon, |branch_button| {
- let (icon, icon_color) = icon_info;
- branch_button.start_icon(
- Icon::new(icon).size(IconSize::Indicator).color(icon_color),
- )
- }),
+ .child(
+ h_flex()
+ .gap_0p5()
+ .when(settings.show_branch_icon, |this| {
+ let (icon, icon_color) = icon_info;
+ this.child(
+ Icon::new(icon).size(IconSize::XSmall).color(icon_color),
+ )
+ })
+ .when_some(linked_worktree_name.as_ref(), |this, worktree_name| {
+ this.child(
+ Label::new(worktree_name)
+ .size(LabelSize::Small)
+ .color(Color::Muted),
+ )
+ .child(
+ Label::new("/").size(LabelSize::Small).color(
+ Color::Custom(
+ cx.theme().colors().text_muted.opacity(0.4),
+ ),
+ ),
+ )
+ })
+ .child(
+ Label::new(branch_name)
+ .size(LabelSize::Small)
+ .color(Color::Muted),
+ ),
+ ),
move |_window, cx| {
Tooltip::with_meta(
- "Recent Branches",
+ "Git Switcher",
Some(&zed_actions::git::Branch),
- "Local branches only",
+ "Worktrees, Branches, and Stashes",
cx,
)
},
@@ -956,9 +986,23 @@ impl TitleBar {
}
fn active_call_changed(&mut self, cx: &mut Context<Self>) {
+ self.observe_diagnostics(cx);
cx.notify();
}
+ fn observe_diagnostics(&mut self, cx: &mut Context<Self>) {
+ let diagnostics = ActiveCall::global(cx)
+ .read(cx)
+ .room()
+ .and_then(|room| room.read(cx).diagnostics().cloned());
+
+ if let Some(diagnostics) = diagnostics {
+ self._diagnostics_subscription = Some(cx.observe(&diagnostics, |_, _, cx| cx.notify()));
+ } else {
+ self._diagnostics_subscription = None;
+ }
+ }
+
fn share_project(&mut self, cx: &mut Context<Self>) {
let active_call = ActiveCall::global(cx);
let project = self.project.clone();
@@ -202,15 +202,15 @@ impl ActiveToolchain {
this.worktree_for_id(worktree_id, cx)
.map(|worktree| worktree.read(cx).abs_path())
})?;
- workspace::WORKSPACE_DB
- .set_toolchain(
- workspace_id,
- worktree_root_path,
- relative_path.clone(),
- toolchain.clone(),
- )
- .await
- .ok()?;
+ let db = cx.update(|_, cx| workspace::WorkspaceDb::global(cx)).ok()?;
+ db.set_toolchain(
+ workspace_id,
+ worktree_root_path,
+ relative_path.clone(),
+ toolchain.clone(),
+ )
+ .await
+ .ok()?;
project
.update(cx, |this, cx| {
this.activate_toolchain(
@@ -920,16 +920,16 @@ impl PickerDelegate for ToolchainSelectorDelegate {
let worktree_abs_path_root = self.worktree_abs_path_root.clone();
let path = self.relative_path.clone();
let relative_path = self.relative_path.clone();
+ let db = workspace::WorkspaceDb::global(cx);
cx.spawn_in(window, async move |_, cx| {
- workspace::WORKSPACE_DB
- .set_toolchain(
- workspace_id,
- worktree_abs_path_root,
- relative_path,
- toolchain.clone(),
- )
- .await
- .log_err();
+ db.set_toolchain(
+ workspace_id,
+ worktree_abs_path_root,
+ relative_path,
+ toolchain.clone(),
+ )
+ .await
+ .log_err();
workspace
.update(cx, |this, cx| {
this.project().update(cx, |this, cx| {
@@ -22,24 +22,26 @@ pub enum AgentThreadStatus {
pub struct ThreadItem {
id: ElementId,
icon: IconName,
+ icon_color: Option<Color>,
+ icon_visible: bool,
custom_icon_from_external_svg: Option<SharedString>,
title: SharedString,
+ title_label_color: Option<Color>,
+ title_generating: bool,
+ highlight_positions: Vec<usize>,
timestamp: SharedString,
notified: bool,
status: AgentThreadStatus,
- generating_title: bool,
selected: bool,
focused: bool,
hovered: bool,
- docked_right: bool,
added: Option<usize>,
removed: Option<usize>,
worktree: Option<SharedString>,
- highlight_positions: Vec<usize>,
+ worktree_full_path: Option<SharedString>,
worktree_highlight_positions: Vec<usize>,
on_click: Option<Box<dyn Fn(&ClickEvent, &mut Window, &mut App) + 'static>>,
on_hover: Box<dyn Fn(&bool, &mut Window, &mut App) + 'static>,
- title_label_color: Option<Color>,
action_slot: Option<AnyElement>,
tooltip: Option<Box<dyn Fn(&mut Window, &mut App) -> AnyView + 'static>>,
}
@@ -49,24 +51,26 @@ impl ThreadItem {
Self {
id: id.into(),
icon: IconName::ZedAgent,
+ icon_color: None,
+ icon_visible: true,
custom_icon_from_external_svg: None,
title: title.into(),
+ title_label_color: None,
+ title_generating: false,
+ highlight_positions: Vec::new(),
timestamp: "".into(),
notified: false,
status: AgentThreadStatus::default(),
- generating_title: false,
selected: false,
focused: false,
hovered: false,
- docked_right: false,
added: None,
removed: None,
worktree: None,
- highlight_positions: Vec::new(),
+ worktree_full_path: None,
worktree_highlight_positions: Vec::new(),
on_click: None,
on_hover: Box::new(|_, _, _| {}),
- title_label_color: None,
action_slot: None,
tooltip: None,
}
@@ -82,6 +86,16 @@ impl ThreadItem {
self
}
+ pub fn icon_color(mut self, color: Color) -> Self {
+ self.icon_color = Some(color);
+ self
+ }
+
+ pub fn icon_visible(mut self, visible: bool) -> Self {
+ self.icon_visible = visible;
+ self
+ }
+
pub fn custom_icon_from_external_svg(mut self, svg: impl Into<SharedString>) -> Self {
self.custom_icon_from_external_svg = Some(svg.into());
self
@@ -97,8 +111,18 @@ impl ThreadItem {
self
}
- pub fn generating_title(mut self, generating: bool) -> Self {
- self.generating_title = generating;
+ pub fn title_generating(mut self, generating: bool) -> Self {
+ self.title_generating = generating;
+ self
+ }
+
+ pub fn title_label_color(mut self, color: Color) -> Self {
+ self.title_label_color = Some(color);
+ self
+ }
+
+ pub fn highlight_positions(mut self, positions: Vec<usize>) -> Self {
+ self.highlight_positions = positions;
self
}
@@ -122,18 +146,13 @@ impl ThreadItem {
self
}
- pub fn docked_right(mut self, docked_right: bool) -> Self {
- self.docked_right = docked_right;
- self
- }
-
pub fn worktree(mut self, worktree: impl Into<SharedString>) -> Self {
self.worktree = Some(worktree.into());
self
}
- pub fn highlight_positions(mut self, positions: Vec<usize>) -> Self {
- self.highlight_positions = positions;
+ pub fn worktree_full_path(mut self, worktree_full_path: impl Into<SharedString>) -> Self {
+ self.worktree_full_path = Some(worktree_full_path.into());
self
}
@@ -160,11 +179,6 @@ impl ThreadItem {
self
}
- pub fn title_label_color(mut self, color: Color) -> Self {
- self.title_label_color = Some(color);
- self
- }
-
pub fn action_slot(mut self, element: impl IntoElement) -> Self {
self.action_slot = Some(element.into_any_element());
self
@@ -179,6 +193,26 @@ impl ThreadItem {
impl RenderOnce for ThreadItem {
fn render(self, _: &mut Window, cx: &mut App) -> impl IntoElement {
let color = cx.theme().colors();
+ let base_bg = color
+ .title_bar_background
+ .blend(color.panel_background.opacity(0.2));
+
+ let base_bg = if self.selected {
+ color.element_active
+ } else {
+ base_bg
+ };
+
+ let hover_color = color
+ .element_active
+ .blend(color.element_background.opacity(0.2));
+
+ let gradient_overlay = GradientFade::new(base_bg, hover_color, hover_color)
+ .width(px(64.0))
+ .right(px(-10.0))
+ .gradient_stop(0.75)
+ .group_name("thread-item");
+
let dot_separator = || {
Label::new("•")
.size(LabelSize::Small)
@@ -186,19 +220,27 @@ impl RenderOnce for ThreadItem {
.alpha(0.5)
};
- let icon_container = || h_flex().size_4().flex_none().justify_center();
+ let icon_id = format!("icon-{}", self.id);
+ let icon_visible = self.icon_visible;
+ let icon_container = || {
+ h_flex()
+ .id(icon_id.clone())
+ .size_4()
+ .flex_none()
+ .justify_center()
+ .when(!icon_visible, |this| this.invisible())
+ };
+ let icon_color = self.icon_color.unwrap_or(Color::Muted);
let agent_icon = if let Some(custom_svg) = self.custom_icon_from_external_svg {
Icon::from_external_svg(custom_svg)
- .color(Color::Muted)
+ .color(icon_color)
.size(IconSize::Small)
} else {
- Icon::new(self.icon)
- .color(Color::Muted)
- .size(IconSize::Small)
+ Icon::new(self.icon).color(icon_color).size(IconSize::Small)
};
let decoration = |icon: IconDecorationKind, color: Hsla| {
- IconDecoration::new(icon, cx.theme().colors().surface_background, cx)
+ IconDecoration::new(icon, base_bg, cx)
.color(color)
.position(gpui::Point {
x: px(-2.),
@@ -206,12 +248,26 @@ impl RenderOnce for ThreadItem {
})
};
- let decoration = if self.status == AgentThreadStatus::Error {
- Some(decoration(IconDecorationKind::X, cx.theme().status().error))
+ let (decoration, icon_tooltip) = if self.status == AgentThreadStatus::Error {
+ (
+ Some(decoration(IconDecorationKind::X, cx.theme().status().error)),
+ Some("Thread has an Error"),
+ )
+ } else if self.status == AgentThreadStatus::WaitingForConfirmation {
+ (
+ Some(decoration(
+ IconDecorationKind::Triangle,
+ cx.theme().status().warning,
+ )),
+ Some("Thread is Waiting for Confirmation"),
+ )
} else if self.notified {
- Some(decoration(IconDecorationKind::Dot, color.text_accent))
+ (
+ Some(decoration(IconDecorationKind::Dot, color.text_accent)),
+ Some("Thread's Generation is Complete"),
+ )
} else {
- None
+ (None, None)
};
let icon = if self.status == AgentThreadStatus::Running {
@@ -223,19 +279,12 @@ impl RenderOnce for ThreadItem {
.with_rotate_animation(2),
)
.into_any_element()
- } else if self.status == AgentThreadStatus::WaitingForConfirmation {
- icon_container()
- .id("waiting-for-confirmation")
- .child(
- Icon::new(IconName::Warning)
- .size(IconSize::Small)
- .color(Color::Warning),
- )
- .tooltip(Tooltip::text("Waiting for confirmation"))
- .into_any_element()
} else if let Some(decoration) = decoration {
icon_container()
.child(DecoratedIcon::new(agent_icon, Some(decoration)))
+ .when_some(icon_tooltip, |icon, tooltip| {
+ icon.tooltip(Tooltip::text(tooltip))
+ })
.into_any_element()
} else {
icon_container().child(agent_icon).into_any_element()
@@ -243,7 +292,8 @@ impl RenderOnce for ThreadItem {
let title = self.title;
let highlight_positions = self.highlight_positions;
- let title_label = if self.generating_title {
+
+ let title_label = if self.title_generating {
Label::new(title)
.color(Color::Muted)
.with_animation(
@@ -255,65 +305,38 @@ impl RenderOnce for ThreadItem {
)
.into_any_element()
} else if highlight_positions.is_empty() {
- let label = Label::new(title);
- let label = if let Some(color) = self.title_label_color {
- label.color(color)
- } else {
- label
- };
- label.into_any_element()
- } else {
- let label = HighlightedLabel::new(title, highlight_positions);
- let label = if let Some(color) = self.title_label_color {
- label.color(color)
- } else {
- label
- };
- label.into_any_element()
- };
-
- let b_bg = color
- .title_bar_background
- .blend(color.panel_background.opacity(0.8));
-
- let base_bg = if self.selected {
- color.element_active
+ Label::new(title)
+ .when_some(self.title_label_color, |label, color| label.color(color))
+ .into_any_element()
} else {
- b_bg
+ HighlightedLabel::new(title, highlight_positions)
+ .when_some(self.title_label_color, |label, color| label.color(color))
+ .into_any_element()
};
- let gradient_overlay =
- GradientFade::new(base_bg, color.element_hover, color.element_active)
- .width(px(64.0))
- .right(px(-10.0))
- .gradient_stop(0.75)
- .group_name("thread-item");
-
let has_diff_stats = self.added.is_some() || self.removed.is_some();
+ let diff_stat_id = self.id.clone();
let added_count = self.added.unwrap_or(0);
let removed_count = self.removed.unwrap_or(0);
- let diff_stat_id = self.id.clone();
+
let has_worktree = self.worktree.is_some();
let has_timestamp = !self.timestamp.is_empty();
let timestamp = self.timestamp;
v_flex()
.id(self.id.clone())
+ .cursor_pointer()
.group("thread-item")
.relative()
.overflow_hidden()
- .cursor_pointer()
.w_full()
- .p_1()
+ .py_1()
+ .px_1p5()
.when(self.selected, |s| s.bg(color.element_active))
.border_1()
.border_color(gpui::transparent_black())
- .when(self.focused, |s| {
- s.when(self.docked_right, |s| s.border_r_2())
- .border_color(color.border_focused)
- })
- .hover(|s| s.bg(color.element_hover))
- .active(|s| s.bg(color.element_active))
+ .when(self.focused, |s| s.border_color(color.border_focused))
+ .hover(|s| s.bg(hover_color))
.on_hover(self.on_hover)
.child(
h_flex()
@@ -334,15 +357,11 @@ impl RenderOnce for ThreadItem {
.child(gradient_overlay)
.when(self.hovered, |this| {
this.when_some(self.action_slot, |this, slot| {
- let overlay = GradientFade::new(
- base_bg,
- color.element_hover,
- color.element_active,
- )
- .width(px(64.0))
- .right(px(6.))
- .gradient_stop(0.75)
- .group_name("thread-item");
+ let overlay = GradientFade::new(base_bg, hover_color, hover_color)
+ .width(px(64.0))
+ .right(px(6.))
+ .gradient_stop(0.75)
+ .group_name("thread-item");
this.child(
h_flex()
@@ -356,57 +375,56 @@ impl RenderOnce for ThreadItem {
})
}),
)
- .when_some(self.worktree, |this, worktree| {
- let worktree_highlight_positions = self.worktree_highlight_positions;
- let worktree_label = if worktree_highlight_positions.is_empty() {
- Label::new(worktree)
- .size(LabelSize::Small)
- .color(Color::Muted)
- .into_any_element()
- } else {
- HighlightedLabel::new(worktree, worktree_highlight_positions)
- .size(LabelSize::Small)
- .color(Color::Muted)
- .into_any_element()
- };
+ .when(has_worktree || has_diff_stats || has_timestamp, |this| {
+ let worktree_full_path = self.worktree_full_path.clone().unwrap_or_default();
+ let worktree_label = self.worktree.map(|worktree| {
+ let positions = self.worktree_highlight_positions;
+ if positions.is_empty() {
+ Label::new(worktree)
+ .size(LabelSize::Small)
+ .color(Color::Muted)
+ .into_any_element()
+ } else {
+ HighlightedLabel::new(worktree, positions)
+ .size(LabelSize::Small)
+ .color(Color::Muted)
+ .into_any_element()
+ }
+ });
this.child(
h_flex()
.min_w_0()
.gap_1p5()
.child(icon_container()) // Icon Spacing
- .child(worktree_label)
- .when(has_diff_stats || has_timestamp, |this| {
- this.child(dot_separator())
- })
- .when(has_diff_stats, |this| {
+ .when_some(worktree_label, |this, label| {
this.child(
- DiffStat::new(diff_stat_id.clone(), added_count, removed_count)
- .tooltip("Unreviewed changes"),
+ h_flex()
+ .id(format!("{}-worktree", self.id.clone()))
+ .gap_1()
+ .child(
+ Icon::new(IconName::GitWorktree)
+ .size(IconSize::XSmall)
+ .color(Color::Muted),
+ )
+ .child(label)
+ .tooltip(move |_, cx| {
+ Tooltip::with_meta(
+ "Thread Running in a Local Git Worktree",
+ None,
+ worktree_full_path.clone(),
+ cx,
+ )
+ }),
)
})
- .when(has_diff_stats && has_timestamp, |this| {
+ .when(has_worktree && (has_diff_stats || has_timestamp), |this| {
this.child(dot_separator())
})
- .when(has_timestamp, |this| {
- this.child(
- Label::new(timestamp.clone())
- .size(LabelSize::Small)
- .color(Color::Muted),
- )
- }),
- )
- })
- .when(!has_worktree && (has_diff_stats || has_timestamp), |this| {
- this.child(
- h_flex()
- .min_w_0()
- .gap_1p5()
- .child(icon_container()) // Icon Spacing
.when(has_diff_stats, |this| {
this.child(
DiffStat::new(diff_stat_id, added_count, removed_count)
- .tooltip("Unreviewed Changes"),
+ .tooltip("Unreviewed changes"),
)
})
.when(has_diff_stats && has_timestamp, |this| {
@@ -559,18 +577,6 @@ impl Component for ThreadItem {
)
.into_any_element(),
),
- single_example(
- "Focused + Docked Right",
- container()
- .child(
- ThreadItem::new("ti-7b", "Focused with right dock border")
- .icon(IconName::AiClaude)
- .timestamp("1w")
- .focused(true)
- .docked_right(true),
- )
- .into_any_element(),
- ),
single_example(
"Selected + Focused",
container()
@@ -692,10 +692,20 @@ impl ContextMenu {
}
pub fn action_checked(
+ self,
+ label: impl Into<SharedString>,
+ action: Box<dyn Action>,
+ checked: bool,
+ ) -> Self {
+ self.action_checked_with_disabled(label, action, checked, false)
+ }
+
+ pub fn action_checked_with_disabled(
mut self,
label: impl Into<SharedString>,
action: Box<dyn Action>,
checked: bool,
+ disabled: bool,
) -> Self {
self.items.push(ContextMenuItem::Entry(ContextMenuEntry {
toggle: if checked {
@@ -718,7 +728,7 @@ impl ContextMenu {
icon_position: IconPosition::End,
icon_size: IconSize::Small,
icon_color: None,
- disabled: false,
+ disabled,
documentation_aside: None,
end_slot_icon: None,
end_slot_title: None,
@@ -1,6 +1,6 @@
use std::ops::Range;
-use gpui::{FontWeight, HighlightStyle, StyledText};
+use gpui::{FontWeight, HighlightStyle, StyleRefinement, StyledText};
use crate::{LabelCommon, LabelLike, LabelSize, LineHeightStyle, prelude::*};
@@ -38,6 +38,40 @@ impl HighlightedLabel {
}
}
+impl HighlightedLabel {
+ fn style(&mut self) -> &mut StyleRefinement {
+ self.base.base.style()
+ }
+
+ pub fn flex_1(mut self) -> Self {
+ self.style().flex_grow = Some(1.);
+ self.style().flex_shrink = Some(1.);
+ self.style().flex_basis = Some(gpui::relative(0.).into());
+ self
+ }
+
+ pub fn flex_none(mut self) -> Self {
+ self.style().flex_grow = Some(0.);
+ self.style().flex_shrink = Some(0.);
+ self
+ }
+
+ pub fn flex_grow(mut self) -> Self {
+ self.style().flex_grow = Some(1.);
+ self
+ }
+
+ pub fn flex_shrink(mut self) -> Self {
+ self.style().flex_shrink = Some(1.);
+ self
+ }
+
+ pub fn flex_shrink_0(mut self) -> Self {
+ self.style().flex_shrink = Some(0.);
+ self
+ }
+}
+
impl LabelCommon for HighlightedLabel {
fn size(mut self, size: LabelSize) -> Self {
self.base = self.base.size(size);
@@ -4,7 +4,7 @@ use component::{Component, ComponentScope, example_group_with_title, single_exam
use gpui::{AnyElement, AnyView, ClickEvent, MouseButton, MouseDownEvent, Pixels, px};
use smallvec::SmallVec;
-use crate::{Disclosure, GradientFade, prelude::*};
+use crate::{Disclosure, prelude::*};
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Default)]
pub enum ListItemSpacing {
@@ -31,9 +31,6 @@ pub struct ListItem {
/// A slot for content that appears on hover after the children
/// It will obscure the `end_slot` when visible.
end_hover_slot: Option<AnyElement>,
- /// When true, renders a gradient fade overlay before the `end_hover_slot`
- /// to smoothly truncate overflowing content.
- end_hover_gradient_overlay: bool,
toggle: Option<bool>,
inset: bool,
on_click: Option<Box<dyn Fn(&ClickEvent, &mut Window, &mut App) + 'static>>,
@@ -49,6 +46,7 @@ pub struct ListItem {
overflow_x: bool,
focused: Option<bool>,
docked_right: bool,
+ height: Option<Pixels>,
}
impl ListItem {
@@ -64,7 +62,6 @@ impl ListItem {
start_slot: None,
end_slot: None,
end_hover_slot: None,
- end_hover_gradient_overlay: false,
toggle: None,
inset: false,
on_click: None,
@@ -80,6 +77,7 @@ impl ListItem {
overflow_x: false,
focused: None,
docked_right: false,
+ height: None,
}
}
@@ -172,11 +170,6 @@ impl ListItem {
self
}
- pub fn end_hover_gradient_overlay(mut self, show: bool) -> Self {
- self.end_hover_gradient_overlay = show;
- self
- }
-
pub fn outlined(mut self) -> Self {
self.outlined = true;
self
@@ -201,6 +194,11 @@ impl ListItem {
self.docked_right = docked_right;
self
}
+
+ pub fn height(mut self, height: Pixels) -> Self {
+ self.height = Some(height);
+ self
+ }
}
impl Disableable for ListItem {
@@ -225,25 +223,11 @@ impl ParentElement for ListItem {
impl RenderOnce for ListItem {
fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
- let color = cx.theme().colors();
-
- let base_bg = if self.selected {
- color.element_active
- } else {
- color.panel_background
- };
-
- let end_hover_gradient_overlay =
- GradientFade::new(base_bg, color.element_hover, color.element_active)
- .width(px(96.0))
- .when_some(self.group_name.clone(), |fade, group| {
- fade.group_name(group)
- });
-
h_flex()
.id(self.id)
.when_some(self.group_name, |this, group| this.group(group))
.w_full()
+ .when_some(self.height, |this, height| this.h(height))
.relative()
// When an item is inset draw the indent spacing outside of the item
.when(self.inset, |this| {
@@ -285,26 +269,21 @@ impl RenderOnce for ListItem {
ListItemSpacing::Sparse => this.py_1(),
})
.when(self.inset && !self.disabled, |this| {
- this
- // TODO: Add focus state
- //.when(self.state == InteractionState::Focused, |this| {
- .when_some(self.focused, |this, focused| {
- if focused {
- this.border_1()
- .border_color(cx.theme().colors().border_focused)
- } else {
- this.border_1()
- }
- })
- .when(self.selectable, |this| {
- this.hover(|style| {
- style.bg(cx.theme().colors().ghost_element_hover)
- })
+ this.when_some(self.focused, |this, focused| {
+ if focused {
+ this.border_1()
+ .border_color(cx.theme().colors().border_focused)
+ } else {
+ this.border_1()
+ }
+ })
+ .when(self.selectable, |this| {
+ this.hover(|style| style.bg(cx.theme().colors().ghost_element_hover))
.active(|style| style.bg(cx.theme().colors().ghost_element_active))
.when(self.selected, |this| {
this.bg(cx.theme().colors().ghost_element_selected)
})
- })
+ })
})
.when_some(
self.on_click.filter(|_| !self.disabled),
@@ -379,9 +358,6 @@ impl RenderOnce for ListItem {
.right(DynamicSpacing::Base06.rems(cx))
.top_0()
.visible_on_hover("list_item")
- .when(self.end_hover_gradient_overlay, |this| {
- this.child(end_hover_gradient_overlay)
- })
.child(end_hover_slot),
)
}),
@@ -23,3 +23,14 @@ pub use with_rem_size::*;
pub fn is_light(cx: &mut App) -> bool {
cx.theme().appearance.is_light()
}
+
+/// Returns the platform-appropriate label for the "reveal in file manager" action.
+pub fn reveal_in_file_manager_label(is_remote: bool) -> &'static str {
+ if cfg!(target_os = "macos") && !is_remote {
+ "Reveal in Finder"
+ } else if cfg!(target_os = "windows") && !is_remote {
+ "Reveal in File Explorer"
+ } else {
+ "Reveal in File Manager"
+ }
+}
@@ -73,13 +73,27 @@ async fn capture_unix(
command.arg("-l");
}
}
+
+ match shell_kind {
+        // Nushell does not support non-interactive login shells, so instead of
+        // "-l -i -c '<command>'" we invoke it as "-l -e '<command>; exit'"
+        // (the trailing "; exit" is appended to the command string below).
+ ShellKind::Nushell => command.arg("-e"),
+ _ => command.args(["-i", "-c"]),
+ };
+
// cd into the directory, triggering directory specific side-effects (asdf, direnv, etc)
command_string.push_str(&format!("cd '{}';", directory.display()));
if let Some(prefix) = shell_kind.command_prefix() {
command_string.push(prefix);
}
command_string.push_str(&format!("{} --printenv {}", zed_path, redir));
- command.args(["-i", "-c", &command_string]);
+
+ if let ShellKind::Nushell = shell_kind {
+ command_string.push_str("; exit");
+ }
+
+ command.arg(&command_string);
super::set_pre_exec_to_start_new_session(&mut command);
@@ -28,7 +28,7 @@ use std::{
sync::OnceLock,
time::Instant,
};
-use task::{HideStrategy, RevealStrategy, SpawnInTerminal, TaskId};
+use task::{HideStrategy, RevealStrategy, SaveStrategy, SpawnInTerminal, TaskId};
use ui::ActiveTheme;
use util::{
ResultExt,
@@ -47,6 +47,7 @@ use crate::{
search::{FindCommand, ReplaceCommand, Replacement},
},
object::Object,
+ rewrap::Rewrap,
state::{Mark, Mode},
visual::VisualDeleteLine,
};
@@ -1725,6 +1726,7 @@ fn generate_commands(_: &App) -> Vec<VimCommand> {
)
.range(wrap_count),
VimCommand::new(("j", "oin"), JoinLines).range(select_range),
+ VimCommand::new(("reflow", ""), Rewrap).range(select_range),
VimCommand::new(("fo", "ld"), editor::actions::FoldSelectedRanges).range(act_on_range),
VimCommand::new(("foldo", "pen"), editor::actions::UnfoldLines)
.bang(editor::actions::UnfoldRecursive)
@@ -2479,6 +2481,7 @@ impl ShellExec {
show_summary: false,
show_command: false,
show_rerun: false,
+ save: SaveStrategy::default(),
};
let task_status = workspace.spawn_in_terminal(spawn_in_terminal, window, cx);
@@ -3536,4 +3539,53 @@ mod test {
Mode::Normal,
);
}
+
+ #[gpui::test]
+ async fn test_reflow(cx: &mut TestAppContext) {
+ let mut cx = VimTestContext::new(cx, true).await;
+
+ cx.update_editor(|editor, _window, cx| {
+ editor.set_hard_wrap(Some(10), cx);
+ });
+
+ cx.set_state(
+ indoc! {"
+ ˇ0123456789 0123456789 0123456789 0123456789
+ "},
+ Mode::Normal,
+ );
+
+ cx.simulate_keystrokes(": reflow");
+ cx.simulate_keystrokes("enter");
+
+ cx.assert_state(
+ indoc! {"
+ 0123456789
+ 0123456789
+ 0123456789
+ ˇ0123456789
+ "},
+ Mode::Normal,
+ );
+
+ cx.set_state(
+ indoc! {"
+ «0123456789 0123456789ˇ»
+ 0123456789 0123456789
+ "},
+ Mode::VisualLine,
+ );
+
+ cx.simulate_keystrokes(": reflow");
+ cx.simulate_keystrokes("enter");
+
+ cx.assert_state(
+ indoc! {"
+ ˇ0123456789
+ 0123456789
+ 0123456789 0123456789
+ "},
+ Mode::Normal,
+ );
+ }
}
@@ -2,11 +2,19 @@ use std::ops::Range;
use editor::{DisplayPoint, MultiBufferOffset, display_map::DisplaySnapshot};
use gpui::Context;
+use language::PointUtf16;
+use multi_buffer::MultiBufferRow;
use text::Bias;
use ui::Window;
use crate::Vim;
+#[derive(Copy, Clone)]
+enum Direction {
+ Above,
+ Below,
+}
+
impl Vim {
/// Creates a duplicate of every selection below it in the first place that has both its start
/// and end
@@ -16,14 +24,7 @@ impl Vim {
window: &mut Window,
cx: &mut Context<Self>,
) {
- self.duplicate_selections(
- times,
- window,
- cx,
- &|prev_point| *prev_point.row_mut() += 1,
- &|prev_range, map| prev_range.end.row() >= map.max_point().row(),
- false,
- );
+ self.duplicate_selections(times, window, cx, Direction::Below);
}
/// Creates a duplicate of every selection above it in the first place that has both its start
@@ -34,14 +35,7 @@ impl Vim {
window: &mut Window,
cx: &mut Context<Self>,
) {
- self.duplicate_selections(
- times,
- window,
- cx,
- &|prev_point| *prev_point.row_mut() = prev_point.row().0.saturating_sub(1),
- &|prev_range, _| prev_range.start.row() == DisplayPoint::zero().row(),
- true,
- );
+ self.duplicate_selections(times, window, cx, Direction::Above);
}
fn duplicate_selections(
@@ -49,9 +43,7 @@ impl Vim {
times: Option<usize>,
window: &mut Window,
cx: &mut Context<Self>,
- advance_search: &dyn Fn(&mut DisplayPoint),
- end_search: &dyn Fn(&Range<DisplayPoint>, &DisplaySnapshot) -> bool,
- above: bool,
+ direction: Direction,
) {
let times = times.unwrap_or(1);
self.update_editor(cx, |_, editor, cx| {
@@ -59,7 +51,7 @@ impl Vim {
let map = editor.display_snapshot(cx);
let mut original_selections = editor.selections.all_display(&map);
// The order matters, because it is recorded when the selections are added.
- if above {
+ if matches!(direction, Direction::Above) {
original_selections.reverse();
}
@@ -68,12 +60,9 @@ impl Vim {
selections.push(display_point_range_to_offset_range(&origin, &map));
let mut last_origin = origin;
for _ in 1..=times {
- if let Some(duplicate) = find_next_valid_duplicate_space(
- last_origin.clone(),
- &map,
- &advance_search,
- &end_search,
- ) {
+ if let Some(duplicate) =
+ find_next_valid_duplicate_space(last_origin.clone(), &map, direction)
+ {
selections.push(display_point_range_to_offset_range(&duplicate, &map));
last_origin = duplicate;
} else {
@@ -90,22 +79,62 @@ impl Vim {
}
fn find_next_valid_duplicate_space(
- mut origin: Range<DisplayPoint>,
+ origin: Range<DisplayPoint>,
map: &DisplaySnapshot,
- advance_search: &impl Fn(&mut DisplayPoint),
- end_search: &impl Fn(&Range<DisplayPoint>, &DisplaySnapshot) -> bool,
+ direction: Direction,
) -> Option<Range<DisplayPoint>> {
- while !end_search(&origin, map) {
- advance_search(&mut origin.start);
- advance_search(&mut origin.end);
+ let buffer = map.buffer_snapshot();
+ let start_col_utf16 = buffer
+ .point_to_point_utf16(origin.start.to_point(map))
+ .column;
+ let end_col_utf16 = buffer.point_to_point_utf16(origin.end.to_point(map)).column;
- if map.clip_point(origin.start, Bias::Left) == origin.start
- && map.clip_point(origin.end, Bias::Right) == origin.end
+ let mut candidate = origin;
+ loop {
+ match direction {
+ Direction::Below => {
+ if candidate.end.row() >= map.max_point().row() {
+ return None;
+ }
+ *candidate.start.row_mut() += 1;
+ *candidate.end.row_mut() += 1;
+ }
+ Direction::Above => {
+ if candidate.start.row() == DisplayPoint::zero().row() {
+ return None;
+ }
+ *candidate.start.row_mut() = candidate.start.row().0.saturating_sub(1);
+ *candidate.end.row_mut() = candidate.end.row().0.saturating_sub(1);
+ }
+ }
+
+ let start_row = DisplayPoint::new(candidate.start.row(), 0)
+ .to_point(map)
+ .row;
+ let end_row = DisplayPoint::new(candidate.end.row(), 0).to_point(map).row;
+
+ if start_col_utf16 > buffer.line_len_utf16(MultiBufferRow(start_row))
+ || end_col_utf16 > buffer.line_len_utf16(MultiBufferRow(end_row))
{
- return Some(origin);
+ continue;
+ }
+
+ let start_col = buffer
+ .point_utf16_to_point(PointUtf16::new(start_row, start_col_utf16))
+ .column;
+ let end_col = buffer
+ .point_utf16_to_point(PointUtf16::new(end_row, end_col_utf16))
+ .column;
+
+ let candidate_start = DisplayPoint::new(candidate.start.row(), start_col);
+ let candidate_end = DisplayPoint::new(candidate.end.row(), end_col);
+
+ if map.clip_point(candidate_start, Bias::Left) == candidate_start
+ && map.clip_point(candidate_end, Bias::Right) == candidate_end
+ {
+ return Some(candidate_start..candidate_end);
}
}
- None
}
fn display_point_range_to_offset_range(
@@ -231,4 +260,54 @@ mod tests {
Mode::HelixNormal,
);
}
+
+    #[gpui::test]
+    async fn test_selection_duplication_multiline_multibyte(cx: &mut gpui::TestAppContext) {
+        let mut cx = VimTestContext::new(cx, true).await;
+        cx.enable_helix();
+
+        // Multiline selection on rows with multibyte chars should preserve
+        // the visual column on both start and end rows.
+        //
+        // NOTE(review): the fixture characters `䡻`/`롻` below look like they
+        // may be mojibake (the sibling test uses plain `ä`, and these read as
+        // a multibyte char fused with a selection-end marker) — confirm the
+        // fixture encodes the intended characters before relying on it.
+        cx.set_state(
+            indoc! {"
+                «H䡻llo
+                Hëllo
+                Hallo"},
+            Mode::HelixNormal,
+        );
+
+        // `C` (Helix "copy selection below") should duplicate the selection
+        // onto the next row at the same character columns.
+        cx.simulate_keystrokes("C");
+
+        cx.assert_state(
+            indoc! {"
+                «H䡻llo
+                «H롻llo
+                Hallo"},
+            Mode::HelixNormal,
+        );
+    }
+
+ #[gpui::test]
+ async fn test_selection_duplication_multibyte(cx: &mut gpui::TestAppContext) {
+ let mut cx = VimTestContext::new(cx, true).await;
+ cx.enable_helix();
+
+ // Selection on a line with multibyte chars should duplicate to the
+ // same character column on the next line, not skip it.
+ cx.set_state(
+ indoc! {"
+ H«äˇ»llo
+ Hallo"},
+ Mode::HelixNormal,
+ );
+
+ cx.simulate_keystrokes("C");
+
+ cx.assert_state(
+ indoc! {"
+ H«äˇ»llo
+ H«aˇ»llo"},
+ Mode::HelixNormal,
+ );
+ }
}
@@ -33,16 +33,14 @@ impl Vim {
let selected_register = vim.selected_register.take();
- let Some((text, clipboard_selections)) = Vim::update_globals(cx, |globals, cx| {
+ let Some(register) = Vim::update_globals(cx, |globals, cx| {
globals.read_register(selected_register, Some(editor), cx)
})
- .and_then(|reg| {
- (!reg.text.is_empty())
- .then_some(reg.text)
- .zip(reg.clipboard_selections)
- }) else {
+ .filter(|reg| !reg.text.is_empty()) else {
return;
};
+ let text = register.text;
+ let clipboard_selections = register.clipboard_selections;
let display_map = editor.display_snapshot(cx);
let current_selections = editor.selections.all_adjusted_display(&display_map);
@@ -63,7 +61,9 @@ impl Vim {
let mut replacement_texts: Vec<String> = Vec::new();
for ix in 0..current_selections.len() {
- let to_insert = if let Some(clip_sel) = clipboard_selections.get(ix) {
+ let to_insert = if let Some(clip_sel) =
+ clipboard_selections.as_ref().and_then(|s| s.get(ix))
+ {
let end_offset = start_offset + clip_sel.len;
let text = text[start_offset..end_offset].to_string();
start_offset = if clip_sel.is_entire_line {
@@ -102,13 +102,16 @@ impl Vim {
} else if action.before {
sel.start
} else if sel.start == sel.end {
- // Helix and Zed differ in how they understand
- // single-point cursors. In Helix, a single-point cursor
- // is "on top" of some character, and pasting after that
- // cursor means that the pasted content should go after
- // that character. (If the cursor is at the end of a
- // line, the pasted content goes on the next line.)
- movement::right(&display_map, sel.end)
+ // In Helix, a single-point cursor is "on top" of a
+ // character, and pasting after means after that character.
+ // At line end this means the next line. But on an empty
+ // line there is no character, so paste at the cursor.
+ let right = movement::right(&display_map, sel.end);
+ if right.row() != sel.end.row() && sel.end.column() == 0 {
+ sel.end
+ } else {
+ right
+ }
} else {
sel.end
};
@@ -146,8 +149,58 @@ impl Vim {
mod test {
use indoc::indoc;
+ use gpui::ClipboardItem;
+
use crate::{state::Mode, test::VimTestContext};
+    #[gpui::test]
+    async fn test_system_clipboard_paste(cx: &mut gpui::TestAppContext) {
+        let mut cx = VimTestContext::new(cx, true).await;
+        cx.enable_helix();
+        cx.set_state(
+            indoc! {"
+                The quiˇck brown
+                fox jumps over
+                the lazy dog."},
+            Mode::HelixNormal,
+        );
+
+        // Single cursor: system clipboard text (no Vim clipboard-selection
+        // metadata) is pasted after the character under the cursor.
+        cx.write_to_clipboard(ClipboardItem::new_string("clipboard".to_string()));
+        cx.simulate_keystrokes("p");
+        cx.assert_state(
+            indoc! {"
+                The quic«clipboardˇ»k brown
+                fox jumps over
+                the lazy dog."},
+            Mode::HelixNormal,
+        );
+
+        // Multiple cursors with system clipboard (no metadata) pastes
+        // the same text at each cursor.
+        cx.set_state(
+            indoc! {"
+                ˇThe quick brown
+                fox ˇjumps over
+                the lazy dog."},
+            Mode::HelixNormal,
+        );
+        cx.write_to_clipboard(ClipboardItem::new_string("hi".to_string()));
+        cx.simulate_keystrokes("p");
+        cx.assert_state(
+            indoc! {"
+                T«hiˇ»he quick brown
+                fox j«hiˇ»umps over
+                the lazy dog."},
+            Mode::HelixNormal,
+        );
+
+        // Multiple cursors on empty lines should paste on those same lines.
+        cx.set_state("ˇ\nˇ\nˇ\nend", Mode::HelixNormal);
+        cx.write_to_clipboard(ClipboardItem::new_string("X".to_string()));
+        cx.simulate_keystrokes("p");
+        cx.assert_state("«Xˇ»\n«Xˇ»\n«Xˇ»\nend", Mode::HelixNormal);
+    }
+
#[gpui::test]
async fn test_paste(cx: &mut gpui::TestAppContext) {
let mut cx = VimTestContext::new(cx, true).await;
@@ -88,82 +88,74 @@ pub fn register(editor: &mut Editor, cx: &mut Context<Vim>) {
impl Vim {
fn scroll(
&mut self,
- move_cursor: bool,
+ preserve_cursor_position: bool,
window: &mut Window,
cx: &mut Context<Self>,
by: fn(c: Option<f32>) -> ScrollAmount,
) {
let amount = by(Vim::take_count(cx).map(|c| c as f32));
- let mode = self.mode;
Vim::take_forced_motion(cx);
self.exit_temporary_normal(window, cx);
- self.update_editor(cx, |_, editor, cx| {
- scroll_editor(editor, mode, move_cursor, amount, window, cx)
- });
+ self.scroll_editor(preserve_cursor_position, amount, window, cx);
}
-}
-fn scroll_editor(
- editor: &mut Editor,
- mode: Mode,
- preserve_cursor_position: bool,
- amount: ScrollAmount,
- window: &mut Window,
- cx: &mut Context<Editor>,
-) {
- let should_move_cursor = editor.newest_selection_on_screen(cx).is_eq();
- let display_snapshot = editor.display_map.update(cx, |map, cx| map.snapshot(cx));
- let old_top = editor
- .scroll_manager
- .scroll_top_display_point(&display_snapshot, cx);
-
- if editor.scroll_hover(amount, window, cx) {
- return;
- }
+ fn scroll_editor(
+ &mut self,
+ preserve_cursor_position: bool,
+ amount: ScrollAmount,
+ window: &mut Window,
+ cx: &mut Context<Vim>,
+ ) {
+ self.update_editor(cx, |vim, editor, cx| {
+ let should_move_cursor = editor.newest_selection_on_screen(cx).is_eq();
+ let display_snapshot = editor.display_map.update(cx, |map, cx| map.snapshot(cx));
+ let old_top = editor
+ .scroll_manager
+ .scroll_top_display_point(&display_snapshot, cx);
+
+ if editor.scroll_hover(amount, window, cx) {
+ return;
+ }
- let full_page_up = amount.is_full_page() && amount.direction().is_upwards();
- let amount = match (amount.is_full_page(), editor.visible_line_count()) {
- (true, Some(visible_line_count)) => {
- if amount.direction().is_upwards() {
- ScrollAmount::Line((amount.lines(visible_line_count) + 1.0) as f32)
- } else {
- ScrollAmount::Line((amount.lines(visible_line_count) - 1.0) as f32)
+ let full_page_up = amount.is_full_page() && amount.direction().is_upwards();
+ let amount = match (amount.is_full_page(), editor.visible_line_count()) {
+ (true, Some(visible_line_count)) => {
+ if amount.direction().is_upwards() {
+ ScrollAmount::Line((amount.lines(visible_line_count) + 1.0) as f32)
+ } else {
+ ScrollAmount::Line((amount.lines(visible_line_count) - 1.0) as f32)
+ }
+ }
+ _ => amount,
+ };
+
+ editor.scroll_screen(&amount, window, cx);
+ if !should_move_cursor {
+ return;
}
- }
- _ => amount,
- };
- editor.scroll_screen(&amount, window, cx);
- if !should_move_cursor {
- return;
- }
+ let Some(visible_line_count) = editor.visible_line_count() else {
+ return;
+ };
- let Some(visible_line_count) = editor.visible_line_count() else {
- return;
- };
+ let Some(visible_column_count) = editor.visible_column_count() else {
+ return;
+ };
- let Some(visible_column_count) = editor.visible_column_count() else {
- return;
- };
+ let display_snapshot = editor.display_map.update(cx, |map, cx| map.snapshot(cx));
+ let top = editor
+ .scroll_manager
+ .scroll_top_display_point(&display_snapshot, cx);
+ let vertical_scroll_margin = EditorSettings::get_global(cx).vertical_scroll_margin;
- let display_snapshot = editor.display_map.update(cx, |map, cx| map.snapshot(cx));
- let top = editor
- .scroll_manager
- .scroll_top_display_point(&display_snapshot, cx);
- let vertical_scroll_margin = EditorSettings::get_global(cx).vertical_scroll_margin;
-
- editor.change_selections(
- SelectionEffects::no_scroll().nav_history(false),
- window,
- cx,
- |s| {
- s.move_with(&mut |map, selection| {
+ let mut move_cursor = |map: &editor::display_map::DisplaySnapshot,
+ mut head: DisplayPoint,
+ goal: SelectionGoal| {
// TODO: Improve the logic and function calls below to be dependent on
// the `amount`. If the amount is vertical, we don't care about
// columns, while if it's horizontal, we don't care about rows,
// so we don't need to calculate both and deal with logic for
// both.
- let mut head = selection.head();
let max_point = map.max_point();
let starting_column = head.column();
@@ -171,17 +163,18 @@ fn scroll_editor(
(vertical_scroll_margin as u32).min(visible_line_count as u32 / 2);
if preserve_cursor_position {
- let new_row = if old_top.row() == top.row() {
- DisplayRow(
- head.row()
- .0
- .saturating_add_signed(amount.lines(visible_line_count) as i32),
- )
- } else {
- DisplayRow(top.row().0.saturating_add_signed(
- selection.head().row().0 as i32 - old_top.row().0 as i32,
- ))
- };
+ let new_row =
+ if old_top.row() == top.row() {
+ DisplayRow(
+ head.row()
+ .0
+ .saturating_add_signed(amount.lines(visible_line_count) as i32),
+ )
+ } else {
+ DisplayRow(top.row().0.saturating_add_signed(
+ head.row().0 as i32 - old_top.row().0 as i32,
+ ))
+ };
head = map.clip_point(DisplayPoint::new(new_row, head.column()), Bias::Left)
}
@@ -259,17 +252,36 @@ fn scroll_editor(
let new_head = map.clip_point(DisplayPoint::new(new_row, new_column), Bias::Left);
let goal = match amount {
ScrollAmount::Column(_) | ScrollAmount::PageWidth(_) => SelectionGoal::None,
- _ => selection.goal,
+ _ => goal,
};
- if selection.is_empty() || !mode.is_visual() {
- selection.collapse_to(new_head, goal)
- } else {
- selection.set_head(new_head, goal)
- };
- })
- },
- );
+ Some((new_head, goal))
+ };
+
+ if vim.mode == Mode::VisualBlock {
+ vim.visual_block_motion(true, editor, window, cx, &mut move_cursor);
+ } else {
+ editor.change_selections(
+ SelectionEffects::no_scroll().nav_history(false),
+ window,
+ cx,
+ |s| {
+ s.move_with(&mut |map, selection| {
+ if let Some((new_head, goal)) =
+ move_cursor(map, selection.head(), selection.goal)
+ {
+ if selection.is_empty() || !vim.mode.is_visual() {
+ selection.collapse_to(new_head, goal)
+ } else {
+ selection.set_head(new_head, goal)
+ }
+ }
+ })
+ },
+ );
+ }
+ });
+ }
}
#[cfg(test)]
@@ -282,12 +282,12 @@ impl Vim {
/// Pastes the clipboard contents, replacing the same number of characters
/// as the clipboard's contents.
pub fn paste_replace(&mut self, window: &mut Window, cx: &mut Context<Self>) {
- let clipboard_text =
- cx.read_from_clipboard()
- .and_then(|item| match item.entries().first() {
- Some(ClipboardEntry::String(text)) => Some(text.text().to_string()),
- _ => None,
- });
+ let clipboard_text = cx.read_from_clipboard().and_then(|item| {
+ item.entries().iter().find_map(|entry| match entry {
+ ClipboardEntry::String(text) => Some(text.text().to_string()),
+ _ => None,
+ })
+ });
if let Some(text) = clipboard_text {
self.push_operator(Operator::Replace, window, cx);
@@ -191,14 +191,15 @@ impl From<Register> for ClipboardItem {
impl From<ClipboardItem> for Register {
fn from(item: ClipboardItem) -> Self {
- // For now, we don't store metadata for multiple entries.
- match item.entries().first() {
- Some(ClipboardEntry::String(value)) if item.entries().len() == 1 => Register {
+ match item.entries().iter().find_map(|entry| match entry {
+ ClipboardEntry::String(value) => Some(value),
+ _ => None,
+ }) {
+ Some(value) => Register {
text: value.text().to_owned().into(),
clipboard_selections: value.metadata_json::<Vec<ClipboardSelection>>(),
},
- // For now, registers can't store images. This could change in the future.
- _ => Register::default(),
+ None => Register::default(),
}
}
}
@@ -322,10 +323,11 @@ impl MarksState {
let Some(workspace_id) = this.update(cx, |this, cx| this.workspace_id(cx)).ok()? else {
return None;
};
+ let db = cx.update(|cx| VimDb::global(cx));
let (marks, paths) = cx
.background_spawn(async move {
- let marks = DB.get_marks(workspace_id)?;
- let paths = DB.get_global_marks_paths(workspace_id)?;
+ let marks = db.get_marks(workspace_id)?;
+ let paths = db.get_global_marks_paths(workspace_id)?;
anyhow::Ok((marks, paths))
})
.await
@@ -444,8 +446,9 @@ impl MarksState {
if let Some(workspace_id) = self.workspace_id(cx) {
let path = path.clone();
let key = key.clone();
+ let db = VimDb::global(cx);
cx.background_spawn(async move {
- DB.set_global_mark_path(workspace_id, key, path).await
+ db.set_global_mark_path(workspace_id, key, path).await
})
.detach_and_log_err(cx);
}
@@ -461,8 +464,9 @@ impl MarksState {
self.serialized_marks.insert(path.clone(), new_points);
if let Some(workspace_id) = self.workspace_id(cx) {
+ let db = VimDb::global(cx);
cx.background_spawn(async move {
- DB.set_marks(workspace_id, path.clone(), to_write).await?;
+ db.set_marks(workspace_id, path.clone(), to_write).await?;
anyhow::Ok(())
})
.detach_and_log_err(cx);
@@ -655,8 +659,9 @@ impl MarksState {
let path = if let Some(target) = self.global_marks.get(&mark_name.clone()) {
let name = mark_name.clone();
if let Some(workspace_id) = self.workspace_id(cx) {
+ let db = VimDb::global(cx);
cx.background_spawn(async move {
- DB.delete_global_marks_path(workspace_id, name).await
+ db.delete_global_marks_path(workspace_id, name).await
})
.detach_and_log_err(cx);
}
@@ -696,7 +701,8 @@ impl MarksState {
.get_mut(&path)
.map(|m| m.remove(&mark_name.clone()));
if let Some(workspace_id) = self.workspace_id(cx) {
- cx.background_spawn(async move { DB.delete_mark(workspace_id, path, mark_name).await })
+ let db = VimDb::global(cx);
+ cx.background_spawn(async move { db.delete_mark(workspace_id, path, mark_name).await })
.detach_and_log_err(cx);
}
}
@@ -1764,7 +1770,7 @@ impl Domain for VimDb {
];
}
-db::static_connection!(DB, VimDb, [WorkspaceDb]);
+db::static_connection!(VimDb, [WorkspaceDb]);
struct SerializedMark {
path: Arc<Path>,
@@ -30,6 +30,7 @@ impl VimTestContext {
theme::init(theme::LoadThemes::JustBase, cx);
settings_ui::init(cx);
markdown_preview::init(cx);
+ zed_actions::init();
});
}
@@ -635,7 +635,7 @@ impl Vim {
fn activate(editor: &mut Editor, window: &mut Window, cx: &mut Context<Editor>) {
let vim = Vim::new(window, cx);
let state = vim.update(cx, |vim, cx| {
- if !editor.mode().is_full() {
+ if !editor.use_modal_editing() {
vim.mode = Mode::Insert;
}
@@ -1561,6 +1561,38 @@ mod test {
});
}
+    // A visual-block selection grown by ctrl-d scrolling should still insert
+    // on every selected row when shift-i is used (compared against a shared
+    // neovim recording via NeovimBackedTestContext).
+    #[gpui::test]
+    async fn test_visual_block_insert_after_ctrl_d_scroll(cx: &mut gpui::TestAppContext) {
+        let mut cx = NeovimBackedTestContext::new(cx).await;
+        // Buffer of ten numbered lines ("01".."10") with the cursor at the top.
+        let shared_state_lines = (1..=10)
+            .map(|line_number| format!("{line_number:02}"))
+            .collect::<Vec<_>>()
+            .join("\n");
+        let shared_state = format!("ˇ{shared_state_lines}\n");
+
+        cx.set_scroll_height(5).await;
+        cx.set_shared_state(&shared_state).await;
+
+        // Start a visual block, then extend it by scrolling half a page down.
+        cx.simulate_shared_keystrokes("ctrl-v ctrl-d").await;
+        cx.shared_state().await.assert_matches();
+
+        // shift-i insert must prepend "x" on each row the block covers.
+        cx.simulate_shared_keystrokes("shift-i x escape").await;
+        cx.shared_state().await.assert_eq(indoc! {
+            "
+            ˇx01
+            x02
+            x03
+            x04
+            x05
+            06
+            07
+            08
+            09
+            10
+            "
+        });
+    }
+
#[gpui::test]
async fn test_visual_block_wrapping_selection(cx: &mut gpui::TestAppContext) {
let mut cx = NeovimBackedTestContext::new(cx).await;
@@ -0,0 +1,10 @@
+{"SetOption":{"value":"scrolloff=3"}}
+{"SetOption":{"value":"lines=7"}}
+{"Put":{"state":"ˇ01\n02\n03\n04\n05\n06\n07\n08\n09\n10\n"}}
+{"Key":"ctrl-v"}
+{"Key":"ctrl-d"}
+{"Get":{"state":"«0ˇ»1\n«0ˇ»2\n«0ˇ»3\n«0ˇ»4\n«0ˇ»5\n06\n07\n08\n09\n10\n","mode":"VisualBlock"}}
+{"Key":"shift-i"}
+{"Key":"x"}
+{"Key":"escape"}
+{"Get":{"state":"ˇx01\nx02\nx03\nx04\nx05\n06\n07\n08\n09\n10\n","mode":"Normal"}}
@@ -12,4 +12,5 @@ workspace = true
path = "src/vim_mode_setting.rs"
[dependencies]
+gpui.workspace = true
settings.workspace = true
@@ -4,6 +4,7 @@
//! disable Vim/Helix modes without having to depend on the `vim` crate in its
//! entirety.
+use gpui::App;
use settings::{RegisterSetting, Settings, SettingsContent};
#[derive(RegisterSetting)]
@@ -15,9 +16,25 @@ impl Settings for VimModeSetting {
}
}
+impl VimModeSetting {
+ pub fn is_enabled(cx: &App) -> bool {
+ Self::try_get(cx)
+ .map(|vim_mode| vim_mode.0)
+ .unwrap_or(false)
+ }
+}
+
#[derive(RegisterSetting)]
pub struct HelixModeSetting(pub bool);
+impl HelixModeSetting {
+ pub fn is_enabled(cx: &App) -> bool {
+ Self::try_get(cx)
+ .map(|helix_mode| helix_mode.0)
+ .unwrap_or(false)
+ }
+}
+
impl Settings for HelixModeSetting {
fn from_settings(content: &SettingsContent) -> Self {
Self(content.helix_mode.unwrap())
@@ -65,6 +65,7 @@ theme.workspace = true
ui.workspace = true
util.workspace = true
uuid.workspace = true
+vim_mode_setting.workspace = true
zed_actions.workspace = true
[target.'cfg(target_os = "windows")'.dependencies]
@@ -7,7 +7,8 @@ use ui::{App, Context};
use util::{ResultExt, paths::PathExt};
use crate::{
- NewWindow, SerializedWorkspaceLocation, WORKSPACE_DB, WorkspaceId, path_list::PathList,
+ NewWindow, SerializedWorkspaceLocation, WorkspaceId, path_list::PathList,
+ persistence::WorkspaceDb,
};
pub fn init(fs: Arc<dyn Fs>, cx: &mut App) {
@@ -40,8 +41,9 @@ impl HistoryManager {
}
fn init(this: Entity<HistoryManager>, fs: Arc<dyn Fs>, cx: &App) {
+ let db = WorkspaceDb::global(cx);
cx.spawn(async move |cx| {
- let recent_folders = WORKSPACE_DB
+ let recent_folders = db
.recent_workspaces_on_disk(fs.as_ref())
.await
.unwrap_or_default()
@@ -102,6 +104,7 @@ impl HistoryManager {
.map(|entry| entry.path.clone())
.collect::<Vec<_>>();
let user_removed = cx.update_jump_list(menus, entries);
+ let db = WorkspaceDb::global(cx);
cx.spawn(async move |this, cx| {
let user_removed = user_removed.await;
if user_removed.is_empty() {
@@ -119,7 +122,7 @@ impl HistoryManager {
}
}) {
for id in deleted_ids.iter() {
- WORKSPACE_DB.delete_workspace_by_id(*id).await.log_err();
+ db.delete_workspace_by_id(*id).await.log_err();
}
}
})
@@ -946,15 +946,29 @@ impl<T: Item> ItemHandle for Entity<T> {
// Only trigger autosave if focus has truly left the item.
// If focus is still within the item's hierarchy (e.g., moved to a context menu),
// don't trigger autosave to avoid unwanted formatting and cursor jumps.
- // Also skip autosave if focus moved to a modal (e.g., command palette),
- // since the user is still interacting with the workspace.
let focus_handle = item.item_focus_handle(cx);
- if !focus_handle.contains_focused(window, cx)
- && !workspace.has_active_modal(window, cx)
- {
- Pane::autosave_item(&item, workspace.project.clone(), window, cx)
- .detach_and_log_err(cx);
+ if focus_handle.contains_focused(window, cx) {
+ return;
}
+
+ let vim_mode = vim_mode_setting::VimModeSetting::is_enabled(cx);
+ let helix_mode = vim_mode_setting::HelixModeSetting::is_enabled(cx);
+
+ if vim_mode || helix_mode {
+ // We use the command palette for executing commands in Vim and Helix modes (e.g., `:w`), so
+ // in those cases we don't want to trigger auto-save if the focus has just been transferred
+ // to the command palette.
+ //
+ // This isn't totally perfect, as you could still switch files indirectly via the command
+ // palette (such as by opening up the tab switcher from it and then switching tabs that
+ // way).
+ if workspace.is_active_modal_command_palette(cx) {
+ return;
+ }
+ }
+
+ Pane::autosave_item(&item, workspace.project.clone(), window, cx)
+ .detach_and_log_err(cx);
}
},
)
@@ -26,6 +26,15 @@ pub trait ModalView: ManagedView {
fn render_bare(&self) -> bool {
false
}
+
+ /// Returns whether this [`ModalView`] is the command palette.
+ ///
+ /// This breaks the encapsulation of the [`ModalView`] trait a little bit, but there doesn't seem to be an
+ /// immediate, more elegant way to have the workspace know about the command palette (due to dependency arrow
+ /// directions).
+ fn is_command_palette(&self) -> bool {
+ false
+ }
}
trait ModalViewHandle {
@@ -33,6 +42,7 @@ trait ModalViewHandle {
fn view(&self) -> AnyView;
fn fade_out_background(&self, cx: &mut App) -> bool;
fn render_bare(&self, cx: &mut App) -> bool;
+ fn is_command_palette(&self, cx: &App) -> bool;
}
impl<V: ModalView> ModalViewHandle for Entity<V> {
@@ -51,6 +61,10 @@ impl<V: ModalView> ModalViewHandle for Entity<V> {
fn render_bare(&self, cx: &mut App) -> bool {
self.read(cx).render_bare()
}
+
+ fn is_command_palette(&self, cx: &App) -> bool {
+ self.read(cx).is_command_palette()
+ }
}
pub struct ActiveModal {
@@ -189,6 +203,13 @@ impl ModalLayer {
pub fn has_active_modal(&self) -> bool {
self.active_modal.is_some()
}
+
+ /// Returns whether the active modal is the command palette.
+ pub fn is_active_modal_command_palette(&self, cx: &App) -> bool {
+ self.active_modal
+ .as_ref()
+ .map_or(false, |modal| modal.modal.is_command_palette(cx))
+ }
}
impl Render for ModalLayer {
@@ -26,8 +26,14 @@ actions!(
[
/// Toggles the workspace switcher sidebar.
ToggleWorkspaceSidebar,
+ /// Closes the workspace sidebar.
+ CloseWorkspaceSidebar,
/// Moves focus to or from the workspace sidebar without closing it.
FocusWorkspaceSidebar,
+ /// Switches to the next workspace.
+ NextWorkspace,
+ /// Switches to the previous workspace.
+ PreviousWorkspace,
]
);
@@ -41,8 +47,10 @@ pub trait Sidebar: Focusable + Render + Sized {
fn width(&self, cx: &App) -> Pixels;
fn set_width(&mut self, width: Option<Pixels>, cx: &mut Context<Self>);
fn has_notifications(&self, cx: &App) -> bool;
- fn toggle_recent_projects_popover(&self, window: &mut Window, cx: &mut App);
- fn is_recent_projects_popover_deployed(&self) -> bool;
+
+ fn is_threads_list_view_active(&self) -> bool {
+ true
+ }
    /// Makes focus reset back to the search editor upon toggling the sidebar from outside
fn prepare_for_focus(&mut self, _window: &mut Window, _cx: &mut Context<Self>) {}
}
@@ -56,8 +64,8 @@ pub trait SidebarHandle: 'static + Send + Sync {
fn has_notifications(&self, cx: &App) -> bool;
fn to_any(&self) -> AnyView;
fn entity_id(&self) -> EntityId;
- fn toggle_recent_projects_popover(&self, window: &mut Window, cx: &mut App);
- fn is_recent_projects_popover_deployed(&self, cx: &App) -> bool;
+
+ fn is_threads_list_view_active(&self, cx: &App) -> bool;
}
#[derive(Clone)]
@@ -103,14 +111,8 @@ impl<T: Sidebar> SidebarHandle for Entity<T> {
Entity::entity_id(self)
}
- fn toggle_recent_projects_popover(&self, window: &mut Window, cx: &mut App) {
- self.update(cx, |this, cx| {
- this.toggle_recent_projects_popover(window, cx);
- });
- }
-
- fn is_recent_projects_popover_deployed(&self, cx: &App) -> bool {
- self.read(cx).is_recent_projects_popover_deployed()
+ fn is_threads_list_view_active(&self, cx: &App) -> bool {
+ self.read(cx).is_threads_list_view_active()
}
}
@@ -161,7 +163,23 @@ impl MultiWorkspace {
}
}
- pub fn register_sidebar<T: Sidebar>(&mut self, sidebar: Entity<T>) {
+ pub fn register_sidebar<T: Sidebar>(&mut self, sidebar: Entity<T>, cx: &mut Context<Self>) {
+ self._subscriptions
+ .push(cx.observe(&sidebar, |this, _, cx| {
+ let has_notifications = this.sidebar_has_notifications(cx);
+ let is_open = this.sidebar_open;
+ let show_toggle = this.multi_workspace_enabled(cx);
+ for workspace in &this.workspaces {
+ workspace.update(cx, |workspace, cx| {
+ workspace.set_workspace_sidebar_open(
+ is_open,
+ has_notifications,
+ show_toggle,
+ cx,
+ );
+ });
+ }
+ }));
self.sidebar = Some(Box::new(sidebar));
}
@@ -179,16 +197,10 @@ impl MultiWorkspace {
.map_or(false, |s| s.has_notifications(cx))
}
- pub fn toggle_recent_projects_popover(&self, window: &mut Window, cx: &mut App) {
- if let Some(sidebar) = &self.sidebar {
- sidebar.toggle_recent_projects_popover(window, cx);
- }
- }
-
- pub fn is_recent_projects_popover_deployed(&self, cx: &App) -> bool {
+ pub fn is_threads_list_view_active(&self, cx: &App) -> bool {
self.sidebar
.as_ref()
- .map_or(false, |s| s.is_recent_projects_popover_deployed(cx))
+ .map_or(false, |s| s.is_threads_list_view_active(cx))
}
pub fn multi_workspace_enabled(&self, cx: &App) -> bool {
@@ -211,6 +223,16 @@ impl MultiWorkspace {
}
}
+ pub fn close_sidebar_action(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ if !self.multi_workspace_enabled(cx) {
+ return;
+ }
+
+ if self.sidebar_open {
+ self.close_sidebar(window, cx);
+ }
+ }
+
pub fn focus_sidebar(&mut self, window: &mut Window, cx: &mut Context<Self>) {
if !self.multi_workspace_enabled(cx) {
return;
@@ -241,9 +263,13 @@ impl MultiWorkspace {
pub fn open_sidebar(&mut self, cx: &mut Context<Self>) {
self.sidebar_open = true;
+ let sidebar_focus_handle = self.sidebar.as_ref().map(|s| s.focus_handle(cx));
+ let has_notifications = self.sidebar_has_notifications(cx);
+ let show_toggle = self.multi_workspace_enabled(cx);
for workspace in &self.workspaces {
workspace.update(cx, |workspace, cx| {
- workspace.set_workspace_sidebar_open(true, cx);
+ workspace.set_workspace_sidebar_open(true, has_notifications, show_toggle, cx);
+ workspace.set_sidebar_focus_handle(sidebar_focus_handle.clone());
});
}
self.serialize(cx);
@@ -252,9 +278,12 @@ impl MultiWorkspace {
fn close_sidebar(&mut self, window: &mut Window, cx: &mut Context<Self>) {
self.sidebar_open = false;
+ let has_notifications = self.sidebar_has_notifications(cx);
+ let show_toggle = self.multi_workspace_enabled(cx);
for workspace in &self.workspaces {
workspace.update(cx, |workspace, cx| {
- workspace.set_workspace_sidebar_open(false, cx);
+ workspace.set_workspace_sidebar_open(false, has_notifications, show_toggle, cx);
+ workspace.set_sidebar_focus_handle(None);
});
}
let pane = self.workspace().read(cx).active_pane().clone();
@@ -349,8 +378,12 @@ impl MultiWorkspace {
index
} else {
if self.sidebar_open {
+ let sidebar_focus_handle = self.sidebar.as_ref().map(|s| s.focus_handle(cx));
+ let has_notifications = self.sidebar_has_notifications(cx);
+ let show_toggle = self.multi_workspace_enabled(cx);
workspace.update(cx, |workspace, cx| {
- workspace.set_workspace_sidebar_open(true, cx);
+ workspace.set_workspace_sidebar_open(true, has_notifications, show_toggle, cx);
+ workspace.set_sidebar_focus_handle(sidebar_focus_handle);
});
}
Self::subscribe_to_workspace(&workspace, cx);
@@ -376,14 +409,38 @@ impl MultiWorkspace {
cx.notify();
}
+ fn cycle_workspace(&mut self, delta: isize, window: &mut Window, cx: &mut Context<Self>) {
+ let count = self.workspaces.len() as isize;
+ if count <= 1 {
+ return;
+ }
+ let current = self.active_workspace_index as isize;
+ let next = ((current + delta).rem_euclid(count)) as usize;
+ self.activate_index(next, window, cx);
+ }
+
+ fn next_workspace(&mut self, _: &NextWorkspace, window: &mut Window, cx: &mut Context<Self>) {
+ self.cycle_workspace(1, window, cx);
+ }
+
+ fn previous_workspace(
+ &mut self,
+ _: &PreviousWorkspace,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.cycle_workspace(-1, window, cx);
+ }
+
fn serialize(&mut self, cx: &mut App) {
let window_id = self.window_id;
let state = crate::persistence::model::MultiWorkspaceState {
active_workspace_id: self.workspace().read(cx).database_id(),
sidebar_open: self.sidebar_open,
};
+ let kvp = db::kvp::KeyValueStore::global(cx);
self._serialize_task = Some(cx.background_spawn(async move {
- crate::persistence::write_multi_workspace_state(window_id, state).await;
+ crate::persistence::write_multi_workspace_state(&kvp, window_id, state).await;
}));
}
@@ -555,8 +612,9 @@ impl MultiWorkspace {
self.focus_active_workspace(window, cx);
let weak_workspace = new_workspace.downgrade();
+ let db = crate::persistence::WorkspaceDb::global(cx);
cx.spawn_in(window, async move |this, cx| {
- let workspace_id = crate::persistence::DB.next_id().await.unwrap();
+ let workspace_id = db.next_id().await.unwrap();
let workspace = weak_workspace.upgrade().unwrap();
let task: Task<()> = this
.update_in(cx, |this, window, cx| {
@@ -566,9 +624,9 @@ impl MultiWorkspace {
workspace.set_database_id(workspace_id);
});
this.serialize(cx);
+ let db = db.clone();
cx.background_spawn(async move {
- crate::persistence::DB
- .set_session_binding(workspace_id, session_id, Some(window_id))
+ db.set_session_binding(workspace_id, session_id, Some(window_id))
.await
.log_err();
})
@@ -592,13 +650,13 @@ impl MultiWorkspace {
}
if let Some(workspace_id) = removed_workspace.read(cx).database_id() {
+ let db = crate::persistence::WorkspaceDb::global(cx);
self.pending_removal_tasks.retain(|task| !task.is_ready());
self.pending_removal_tasks
.push(cx.background_spawn(async move {
// Clear the session binding instead of deleting the row so
// the workspace still appears in the recent-projects list.
- crate::persistence::DB
- .set_session_binding(workspace_id, None, None)
+ db.set_session_binding(workspace_id, None, None)
.await
.log_err();
}));
@@ -719,11 +777,18 @@ impl Render for MultiWorkspace {
this.toggle_sidebar(window, cx);
},
))
+ .on_action(cx.listener(
+ |this: &mut Self, _: &CloseWorkspaceSidebar, window, cx| {
+ this.close_sidebar_action(window, cx);
+ },
+ ))
.on_action(cx.listener(
|this: &mut Self, _: &FocusWorkspaceSidebar, window, cx| {
this.focus_sidebar(window, cx);
},
))
+ .on_action(cx.listener(Self::next_workspace))
+ .on_action(cx.listener(Self::previous_workspace))
})
.when(
self.sidebar_open() && self.multi_workspace_enabled(cx),
@@ -3192,6 +3192,7 @@ impl Pane {
});
let entry_abs_path = pane.read(cx).entry_abs_path(entry, cx);
+ let reveal_path = entry_abs_path.clone();
let parent_abs_path = entry_abs_path
.as_deref()
.and_then(|abs_path| Some(abs_path.parent()?.to_path_buf()));
@@ -3201,6 +3202,15 @@ impl Pane {
let visible_in_project_panel = relative_path.is_some()
&& worktree.is_some_and(|worktree| worktree.read(cx).is_visible());
+ let is_local = pane.read(cx).project.upgrade().is_some_and(|project| {
+ let project = project.read(cx);
+ project.is_local() || project.is_via_wsl_with_host_interop(cx)
+ });
+ let is_remote = pane
+ .read(cx)
+ .project
+ .upgrade()
+ .is_some_and(|project| project.read(cx).is_remote());
let entry_id = entry.to_proto();
@@ -3233,8 +3243,26 @@ impl Pane {
}),
)
})
+ .when(is_local, |menu| {
+ menu.when_some(reveal_path, |menu, reveal_path| {
+ menu.separator().entry(
+ ui::utils::reveal_in_file_manager_label(is_remote),
+ Some(Box::new(
+ zed_actions::editor::RevealInFileManager,
+ )),
+ window.handler_for(&pane, move |pane, _, cx| {
+ if let Some(project) = pane.project.upgrade() {
+ project.update(cx, |project, cx| {
+ project.reveal_path(&reveal_path, cx);
+ });
+ } else {
+ cx.reveal_path(&reveal_path);
+ }
+ }),
+ )
+ })
+ })
.map(pin_tab_entries)
- .separator()
.when(visible_in_project_panel, |menu| {
menu.entry(
"Reveal In Project Panel",
@@ -14,7 +14,7 @@ use fs::Fs;
use anyhow::{Context as _, Result, bail};
use collections::{HashMap, HashSet, IndexSet};
use db::{
- kvp::KEY_VALUE_STORE,
+ kvp::KeyValueStore,
query,
sqlez::{connection::Connection, domain::Domain},
sqlez_macros::sql,
@@ -174,8 +174,8 @@ impl Column for SerializedWindowBounds {
const DEFAULT_WINDOW_BOUNDS_KEY: &str = "default_window_bounds";
-pub fn read_default_window_bounds() -> Option<(Uuid, WindowBounds)> {
- let json_str = KEY_VALUE_STORE
+pub fn read_default_window_bounds(kvp: &KeyValueStore) -> Option<(Uuid, WindowBounds)> {
+ let json_str = kvp
.read_kvp(DEFAULT_WINDOW_BOUNDS_KEY)
.log_err()
.flatten()?;
@@ -186,13 +186,13 @@ pub fn read_default_window_bounds() -> Option<(Uuid, WindowBounds)> {
}
pub async fn write_default_window_bounds(
+ kvp: &KeyValueStore,
bounds: WindowBounds,
display_uuid: Uuid,
) -> anyhow::Result<()> {
let persisted = WindowBoundsJson::from(bounds);
let json_str = serde_json::to_string(&(display_uuid, persisted))?;
- KEY_VALUE_STORE
- .write_kvp(DEFAULT_WINDOW_BOUNDS_KEY.to_string(), json_str)
+ kvp.write_kvp(DEFAULT_WINDOW_BOUNDS_KEY.to_string(), json_str)
.await?;
Ok(())
}
@@ -290,12 +290,9 @@ impl From<WindowBoundsJson> for WindowBounds {
}
}
-fn multi_workspace_states() -> db::kvp::ScopedKeyValueStore<'static> {
- KEY_VALUE_STORE.scoped("multi_workspace_state")
-}
-
-fn read_multi_workspace_state(window_id: WindowId) -> model::MultiWorkspaceState {
- multi_workspace_states()
+fn read_multi_workspace_state(window_id: WindowId, cx: &App) -> model::MultiWorkspaceState {
+ let kvp = KeyValueStore::global(cx);
+ kvp.scoped("multi_workspace_state")
.read(&window_id.as_u64().to_string())
.log_err()
.flatten()
@@ -303,9 +300,13 @@ fn read_multi_workspace_state(window_id: WindowId) -> model::MultiWorkspaceState
.unwrap_or_default()
}
-pub async fn write_multi_workspace_state(window_id: WindowId, state: model::MultiWorkspaceState) {
+pub async fn write_multi_workspace_state(
+ kvp: &KeyValueStore,
+ window_id: WindowId,
+ state: model::MultiWorkspaceState,
+) {
if let Ok(json_str) = serde_json::to_string(&state) {
- multi_workspace_states()
+ kvp.scoped("multi_workspace_state")
.write(window_id.as_u64().to_string(), json_str)
.await
.log_err();
@@ -314,6 +315,7 @@ pub async fn write_multi_workspace_state(window_id: WindowId, state: model::Mult
pub fn read_serialized_multi_workspaces(
session_workspaces: Vec<model::SessionWorkspace>,
+ cx: &App,
) -> Vec<model::SerializedMultiWorkspace> {
let mut window_groups: Vec<Vec<model::SessionWorkspace>> = Vec::new();
let mut window_id_to_group: HashMap<WindowId, usize> = HashMap::default();
@@ -338,7 +340,7 @@ pub fn read_serialized_multi_workspaces(
.map(|group| {
let window_id = group.first().and_then(|sw| sw.window_id);
let state = window_id
- .map(read_multi_workspace_state)
+ .map(|wid| read_multi_workspace_state(wid, cx))
.unwrap_or_default();
model::SerializedMultiWorkspace {
workspaces: group,
@@ -350,19 +352,18 @@ pub fn read_serialized_multi_workspaces(
const DEFAULT_DOCK_STATE_KEY: &str = "default_dock_state";
-pub fn read_default_dock_state() -> Option<DockStructure> {
- let json_str = KEY_VALUE_STORE
- .read_kvp(DEFAULT_DOCK_STATE_KEY)
- .log_err()
- .flatten()?;
+pub fn read_default_dock_state(kvp: &KeyValueStore) -> Option<DockStructure> {
+ let json_str = kvp.read_kvp(DEFAULT_DOCK_STATE_KEY).log_err().flatten()?;
serde_json::from_str::<DockStructure>(&json_str).ok()
}
-pub async fn write_default_dock_state(docks: DockStructure) -> anyhow::Result<()> {
+pub async fn write_default_dock_state(
+ kvp: &KeyValueStore,
+ docks: DockStructure,
+) -> anyhow::Result<()> {
let json_str = serde_json::to_string(&docks)?;
- KEY_VALUE_STORE
- .write_kvp(DEFAULT_DOCK_STATE_KEY.to_string(), json_str)
+ kvp.write_kvp(DEFAULT_DOCK_STATE_KEY.to_string(), json_str)
.await?;
Ok(())
}
@@ -980,7 +981,7 @@ impl Domain for WorkspaceDb {
}
}
-db::static_connection!(DB, WorkspaceDb, []);
+db::static_connection!(WorkspaceDb, []);
impl WorkspaceDb {
/// Returns a serialized workspace for the given worktree_roots. If the passed array
@@ -2252,7 +2253,7 @@ impl WorkspaceDb {
use db::sqlez::statement::Statement;
use itertools::Itertools as _;
- DB.clear_trusted_worktrees()
+ self.clear_trusted_worktrees()
.await
.context("clearing previous trust state")?;
@@ -2319,7 +2320,7 @@ VALUES {placeholders};"#
}
pub fn fetch_trusted_worktrees(&self) -> Result<DbTrustedPaths> {
- let trusted_worktrees = DB.trusted_worktrees()?;
+ let trusted_worktrees = self.trusted_worktrees()?;
Ok(trusted_worktrees
.into_iter()
.filter_map(|(abs_path, user_name, host_name)| {
@@ -2358,6 +2359,86 @@ VALUES {placeholders};"#
}
}
+type WorkspaceEntry = (
+ WorkspaceId,
+ SerializedWorkspaceLocation,
+ PathList,
+ DateTime<Utc>,
+);
+
+/// Resolves workspace entries whose paths are git linked worktree checkouts
+/// to their main repository paths.
+///
+/// For each workspace entry:
+/// - If any path is a linked worktree checkout, all worktree paths in that
+/// entry are resolved to their main repository paths, producing a new
+/// `PathList`.
+/// - The resolved entry is then deduplicated against existing entries: if a
+/// workspace with the same paths already exists, the entry with the most
+/// recent timestamp is kept.
+pub async fn resolve_worktree_workspaces(
+ workspaces: impl IntoIterator<Item = WorkspaceEntry>,
+ fs: &dyn Fs,
+) -> Vec<WorkspaceEntry> {
+ // First pass: resolve worktree paths to main repo paths concurrently.
+ let resolved = futures::future::join_all(workspaces.into_iter().map(|entry| async move {
+ let paths = entry.2.paths();
+ if paths.is_empty() {
+ return entry;
+ }
+
+        // Resolve each path to its main repository path (if it is a linked worktree) concurrently
+ let resolved_paths = futures::future::join_all(
+ paths
+ .iter()
+ .map(|path| project::git_store::resolve_git_worktree_to_main_repo(fs, path)),
+ )
+ .await;
+
+
+        // If no path resolved, none of this entry's roots is a linked worktree checkout — keep the entry unchanged
+ return entry;
+ }
+
+        // Build the new path list, substituting each resolved main-repo path for its worktree path
+ let new_paths: Vec<PathBuf> = paths
+ .iter()
+ .zip(resolved_paths.iter())
+ .map(|(original, resolved)| {
+ resolved
+ .as_ref()
+ .cloned()
+ .unwrap_or_else(|| original.clone())
+ })
+ .collect();
+
+ let new_path_refs: Vec<&Path> = new_paths.iter().map(|p| p.as_path()).collect();
+ (entry.0, entry.1, PathList::new(&new_path_refs), entry.3)
+ }))
+ .await;
+
+ // Second pass: deduplicate by PathList.
+ // When two entries resolve to the same paths, keep the one with the
+ // more recent timestamp.
+ let mut seen: collections::HashMap<Vec<PathBuf>, usize> = collections::HashMap::default();
+ let mut result: Vec<WorkspaceEntry> = Vec::new();
+
+ for entry in resolved {
+ let key: Vec<PathBuf> = entry.2.paths().to_vec();
+ if let Some(&existing_idx) = seen.get(&key) {
+ // Keep the entry with the more recent timestamp
+ if entry.3 > result[existing_idx].3 {
+ result[existing_idx] = entry;
+ }
+ } else {
+ seen.insert(key, result.len());
+ result.push(entry);
+ }
+ }
+
+ result
+}
+
pub fn delete_unloaded_items(
alive_items: Vec<ItemId>,
workspace_id: WorkspaceId,
@@ -2450,7 +2531,7 @@ mod tests {
cx.run_until_parked();
// Read back the persisted state and check that the active workspace ID was written.
- let state_after_add = read_multi_workspace_state(window_id);
+ let state_after_add = cx.update(|_, cx| read_multi_workspace_state(window_id, cx));
let active_workspace2_db_id = workspace2.read_with(cx, |ws, _| ws.database_id());
assert_eq!(
state_after_add.active_workspace_id, active_workspace2_db_id,
@@ -2465,7 +2546,7 @@ mod tests {
cx.run_until_parked();
- let state_after_remove = read_multi_workspace_state(window_id);
+ let state_after_remove = cx.update(|_, cx| read_multi_workspace_state(window_id, cx));
let remaining_db_id =
multi_workspace.read_with(cx, |mw, cx| mw.workspace().read(cx).database_id());
assert_eq!(
@@ -3882,14 +3963,17 @@ mod tests {
}
#[gpui::test]
- async fn test_read_serialized_multi_workspaces_with_state() {
+ async fn test_read_serialized_multi_workspaces_with_state(cx: &mut gpui::TestAppContext) {
use crate::persistence::model::MultiWorkspaceState;
// Write multi-workspace state for two windows via the scoped KVP.
let window_10 = WindowId::from(10u64);
let window_20 = WindowId::from(20u64);
+ let kvp = cx.update(|cx| KeyValueStore::global(cx));
+
write_multi_workspace_state(
+ &kvp,
window_10,
MultiWorkspaceState {
active_workspace_id: Some(WorkspaceId(2)),
@@ -3899,6 +3983,7 @@ mod tests {
.await;
write_multi_workspace_state(
+ &kvp,
window_20,
MultiWorkspaceState {
active_workspace_id: Some(WorkspaceId(3)),
@@ -3935,7 +4020,7 @@ mod tests {
},
];
- let results = read_serialized_multi_workspaces(session_workspaces);
+ let results = cx.update(|cx| read_serialized_multi_workspaces(session_workspaces, cx));
// Should produce 3 groups: window 10, window 20, and the orphan.
assert_eq!(results.len(), 3);
@@ -3981,14 +4066,16 @@ mod tests {
let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+ let db = cx.update(|_, cx| WorkspaceDb::global(cx));
+
// Assign a database_id so serialization will actually persist.
- let workspace_id = DB.next_id().await.unwrap();
+ let workspace_id = db.next_id().await.unwrap();
workspace.update(cx, |ws, _cx| {
ws.set_database_id(workspace_id);
});
// Mutate some workspace state.
- DB.set_centered_layout(workspace_id, true).await.unwrap();
+ db.set_centered_layout(workspace_id, true).await.unwrap();
// Call flush_serialization and await the returned task directly
// (without run_until_parked — the point is that awaiting the task
@@ -4000,7 +4087,7 @@ mod tests {
task.await;
// Read the workspace back from the DB and verify serialization happened.
- let serialized = DB.workspace_for_id(workspace_id);
+ let serialized = db.workspace_for_id(workspace_id);
assert!(
serialized.is_some(),
"flush_serialization should have persisted the workspace to DB"
@@ -4053,7 +4140,7 @@ mod tests {
);
// The multi-workspace state should record it as the active workspace.
- let state = read_multi_workspace_state(window_id);
+ let state = cx.update(|_, cx| read_multi_workspace_state(window_id, cx));
assert_eq!(
state.active_workspace_id, new_workspace_db_id,
"Serialized active_workspace_id should match the new workspace's database_id"
@@ -4062,7 +4149,8 @@ mod tests {
// The individual workspace row should exist with real data
// (not just the bare DEFAULT VALUES row from next_id).
let workspace_id = new_workspace_db_id.unwrap();
- let serialized = DB.workspace_for_id(workspace_id);
+ let db = cx.update(|_, cx| WorkspaceDb::global(cx));
+ let serialized = db.workspace_for_id(workspace_id);
assert!(
serialized.is_some(),
"Newly created workspace should be fully serialized in the DB after database_id assignment"
@@ -4095,8 +4183,10 @@ mod tests {
mw.set_random_database_id(cx);
});
+ let db = cx.update(|_, cx| WorkspaceDb::global(cx));
+
// Get a real DB id for workspace2 so the row actually exists.
- let workspace2_db_id = DB.next_id().await.unwrap();
+ let workspace2_db_id = db.next_id().await.unwrap();
multi_workspace.update_in(cx, |mw, window, cx| {
let workspace = cx.new(|cx| crate::Workspace::test_new(project2.clone(), window, cx));
@@ -4108,7 +4198,7 @@ mod tests {
// Save a full workspace row to the DB directly.
let session_id = format!("remove-test-session-{}", Uuid::new_v4());
- DB.save_workspace(SerializedWorkspace {
+ db.save_workspace(SerializedWorkspace {
id: workspace2_db_id,
paths: PathList::new(&[&dir]),
location: SerializedWorkspaceLocation::Local,
@@ -4125,7 +4215,7 @@ mod tests {
.await;
assert!(
- DB.workspace_for_id(workspace2_db_id).is_some(),
+ db.workspace_for_id(workspace2_db_id).is_some(),
"Workspace2 should exist in DB before removal"
);
@@ -4140,11 +4230,11 @@ mod tests {
// projects, but the session binding should be cleared so it is not
// restored as part of any future session.
assert!(
- DB.workspace_for_id(workspace2_db_id).is_some(),
+ db.workspace_for_id(workspace2_db_id).is_some(),
"Removed workspace's DB row should be preserved for recent projects"
);
- let session_workspaces = DB
+ let session_workspaces = db
.last_session_workspace_locations("remove-test-session", None, fs.as_ref())
.await
.unwrap();
@@ -4181,9 +4271,11 @@ mod tests {
let project1 = Project::test(fs.clone(), [], cx).await;
let project2 = Project::test(fs.clone(), [], cx).await;
+ let db = cx.update(|cx| WorkspaceDb::global(cx));
+
// Get real DB ids so the rows actually exist.
- let ws1_id = DB.next_id().await.unwrap();
- let ws2_id = DB.next_id().await.unwrap();
+ let ws1_id = db.next_id().await.unwrap();
+ let ws2_id = db.next_id().await.unwrap();
let (multi_workspace, cx) =
cx.add_window_view(|window, cx| MultiWorkspace::test_new(project1.clone(), window, cx));
@@ -4205,7 +4297,7 @@ mod tests {
let session_id = "test-zombie-session";
let window_id_val: u64 = 42;
- DB.save_workspace(SerializedWorkspace {
+ db.save_workspace(SerializedWorkspace {
id: ws1_id,
paths: PathList::new(&[dir1.path()]),
location: SerializedWorkspaceLocation::Local,
@@ -4221,7 +4313,7 @@ mod tests {
})
.await;
- DB.save_workspace(SerializedWorkspace {
+ db.save_workspace(SerializedWorkspace {
id: ws2_id,
paths: PathList::new(&[dir2.path()]),
location: SerializedWorkspaceLocation::Local,
@@ -4245,7 +4337,7 @@ mod tests {
cx.run_until_parked();
// The removed workspace should NOT appear in session restoration.
- let locations = DB
+ let locations = db
.last_session_workspace_locations(session_id, None, fs.as_ref())
.await
.unwrap();
@@ -4281,8 +4373,10 @@ mod tests {
let project1 = Project::test(fs.clone(), [], cx).await;
let project2 = Project::test(fs.clone(), [], cx).await;
+ let db = cx.update(|cx| WorkspaceDb::global(cx));
+
// Get a real DB id for workspace2 so the row actually exists.
- let workspace2_db_id = DB.next_id().await.unwrap();
+ let workspace2_db_id = db.next_id().await.unwrap();
let (multi_workspace, cx) =
cx.add_window_view(|window, cx| MultiWorkspace::test_new(project1.clone(), window, cx));
@@ -4301,7 +4395,7 @@ mod tests {
// Save a full workspace row to the DB directly and let it settle.
let session_id = format!("pending-removal-session-{}", Uuid::new_v4());
- DB.save_workspace(SerializedWorkspace {
+ db.save_workspace(SerializedWorkspace {
id: workspace2_db_id,
paths: PathList::new(&[&dir]),
location: SerializedWorkspaceLocation::Local,
@@ -4347,11 +4441,11 @@ mod tests {
// The row should still exist (for recent projects), but the session
// binding should have been cleared by the pending removal task.
assert!(
- DB.workspace_for_id(workspace2_db_id).is_some(),
+ db.workspace_for_id(workspace2_db_id).is_some(),
"Workspace row should be preserved for recent projects"
);
- let session_workspaces = DB
+ let session_workspaces = db
.last_session_workspace_locations("pending-removal-session", None, fs.as_ref())
.await
.unwrap();
@@ -4401,8 +4495,10 @@ mod tests {
let workspace_id = new_workspace_db_id.unwrap();
+ let db = cx.update(|_, cx| WorkspaceDb::global(cx));
+
assert!(
- DB.workspace_for_id(workspace_id).is_some(),
+ db.workspace_for_id(workspace_id).is_some(),
"The workspace row should exist in the DB"
);
@@ -4413,7 +4509,7 @@ mod tests {
cx.executor().advance_clock(Duration::from_millis(200));
cx.run_until_parked();
- let serialized = DB
+ let serialized = db
.workspace_for_id(workspace_id)
.expect("workspace row should still exist");
assert!(
@@ -4446,7 +4542,8 @@ mod tests {
let (multi_workspace, cx) =
cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
- let workspace_id = DB.next_id().await.unwrap();
+ let db = cx.update(|_, cx| WorkspaceDb::global(cx));
+ let workspace_id = db.next_id().await.unwrap();
multi_workspace.update_in(cx, |mw, _, cx| {
mw.workspace().update(cx, |ws, _cx| {
ws.set_database_id(workspace_id);
@@ -4459,7 +4556,7 @@ mod tests {
});
task.await;
- let after = DB
+ let after = db
.workspace_for_id(workspace_id)
.expect("workspace row should exist after flush_serialization");
assert!(
@@ -4472,4 +4569,116 @@ mod tests {
before the process exits."
);
}
+
+ #[gpui::test]
+ async fn test_resolve_worktree_workspaces(cx: &mut gpui::TestAppContext) {
+ let fs = fs::FakeFs::new(cx.executor());
+
+ // Main repo with a linked worktree entry
+ fs.insert_tree(
+ "/repo",
+ json!({
+ ".git": {
+ "worktrees": {
+ "feature": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/feature"
+ }
+ }
+ },
+ "src": { "main.rs": "" }
+ }),
+ )
+ .await;
+
+ // Linked worktree checkout pointing back to /repo
+ fs.insert_tree(
+ "/worktree",
+ json!({
+ ".git": "gitdir: /repo/.git/worktrees/feature",
+ "src": { "main.rs": "" }
+ }),
+ )
+ .await;
+
+ // A plain non-git project
+ fs.insert_tree(
+ "/plain-project",
+ json!({
+ "src": { "main.rs": "" }
+ }),
+ )
+ .await;
+
+ // Another normal git repo (used in mixed-path entry)
+ fs.insert_tree(
+ "/other-repo",
+ json!({
+ ".git": {},
+ "src": { "lib.rs": "" }
+ }),
+ )
+ .await;
+
+ let t0 = Utc::now() - chrono::Duration::hours(4);
+ let t1 = Utc::now() - chrono::Duration::hours(3);
+ let t2 = Utc::now() - chrono::Duration::hours(2);
+ let t3 = Utc::now() - chrono::Duration::hours(1);
+
+ let workspaces = vec![
+ // 1: Main checkout of /repo (opened earlier)
+ (
+ WorkspaceId(1),
+ SerializedWorkspaceLocation::Local,
+ PathList::new(&["/repo"]),
+ t0,
+ ),
+ // 2: Linked worktree of /repo (opened more recently)
+ // Should dedup with #1; more recent timestamp wins.
+ (
+ WorkspaceId(2),
+ SerializedWorkspaceLocation::Local,
+ PathList::new(&["/worktree"]),
+ t1,
+ ),
+ // 3: Mixed-path workspace: one root is a linked worktree,
+ // the other is a normal repo. The worktree path should be
+ // resolved; the normal path kept as-is.
+ (
+ WorkspaceId(3),
+ SerializedWorkspaceLocation::Local,
+ PathList::new(&["/other-repo", "/worktree"]),
+ t2,
+ ),
+ // 4: Non-git project — passed through unchanged.
+ (
+ WorkspaceId(4),
+ SerializedWorkspaceLocation::Local,
+ PathList::new(&["/plain-project"]),
+ t3,
+ ),
+ ];
+
+ let result = resolve_worktree_workspaces(workspaces, fs.as_ref()).await;
+
+ // Should have 3 entries: #1 and #2 deduped into one, plus #3 and #4.
+ assert_eq!(result.len(), 3);
+
+ // First entry: /repo — deduplicated from #1 and #2.
+ // Keeps the position of #1 (first seen), but with #2's later timestamp.
+ assert_eq!(result[0].2.paths(), &[PathBuf::from("/repo")]);
+ assert_eq!(result[0].3, t1);
+
+ // Second entry: mixed-path workspace with worktree resolved.
+ // /worktree → /repo, so paths become [/other-repo, /repo] (sorted).
+ assert_eq!(
+ result[1].2.paths(),
+ &[PathBuf::from("/other-repo"), PathBuf::from("/repo")]
+ );
+ assert_eq!(result[1].0, WorkspaceId(3));
+
+ // Third entry: non-git project, unchanged.
+ assert_eq!(result[2].2.paths(), &[PathBuf::from("/plain-project")]);
+ assert_eq!(result[2].0, WorkspaceId(4));
+ }
}
@@ -1,11 +1,11 @@
-use crate::{ItemHandle, Pane};
+use crate::{ItemHandle, MultiWorkspace, Pane, ToggleWorkspaceSidebar};
use gpui::{
AnyView, App, Context, Decorations, Entity, IntoElement, ParentElement, Render, Styled,
Subscription, Window,
};
use std::any::TypeId;
use theme::CLIENT_SIDE_DECORATION_ROUNDING;
-use ui::{h_flex, prelude::*};
+use ui::{Divider, Indicator, Tooltip, prelude::*};
use util::ResultExt;
pub trait StatusItemView: Render {
@@ -35,6 +35,8 @@ pub struct StatusBar {
active_pane: Entity<Pane>,
_observe_active_pane: Subscription,
workspace_sidebar_open: bool,
+ sidebar_has_notifications: bool,
+ show_sidebar_toggle: bool,
}
impl Render for StatusBar {
@@ -43,8 +45,7 @@ impl Render for StatusBar {
.w_full()
.justify_between()
.gap(DynamicSpacing::Base08.rems(cx))
- .py(DynamicSpacing::Base04.rems(cx))
- .px(DynamicSpacing::Base06.rems(cx))
+ .p(DynamicSpacing::Base04.rems(cx))
.bg(cx.theme().colors().status_bar_background)
.map(|el| match window.window_decorations() {
Decorations::Server => el,
@@ -61,17 +62,21 @@ impl Render for StatusBar {
.border_b(px(1.0))
.border_color(cx.theme().colors().status_bar_background),
})
- .child(self.render_left_tools())
+ .child(self.render_left_tools(cx))
.child(self.render_right_tools())
}
}
impl StatusBar {
- fn render_left_tools(&self) -> impl IntoElement {
+ fn render_left_tools(&self, cx: &mut Context<Self>) -> impl IntoElement {
h_flex()
.gap_1()
.min_w_0()
.overflow_x_hidden()
+ .when(
+ self.show_sidebar_toggle && !self.workspace_sidebar_open,
+ |this| this.child(self.render_sidebar_toggle(cx)),
+ )
.children(self.left_items.iter().map(|item| item.to_any()))
}
@@ -82,6 +87,33 @@ impl StatusBar {
.overflow_x_hidden()
.children(self.right_items.iter().rev().map(|item| item.to_any()))
}
+
+ fn render_sidebar_toggle(&self, cx: &mut Context<Self>) -> impl IntoElement {
+ h_flex()
+ .gap_0p5()
+ .child(
+ IconButton::new(
+ "toggle-workspace-sidebar",
+ IconName::ThreadsSidebarLeftClosed,
+ )
+ .icon_size(IconSize::Small)
+ .when(self.sidebar_has_notifications, |this| {
+ this.indicator(Indicator::dot().color(Color::Accent))
+ .indicator_border_color(Some(cx.theme().colors().status_bar_background))
+ })
+ .tooltip(move |_, cx| {
+ Tooltip::for_action("Open Threads Sidebar", &ToggleWorkspaceSidebar, cx)
+ })
+ .on_click(move |_, window, cx| {
+ if let Some(multi_workspace) = window.root::<MultiWorkspace>().flatten() {
+ multi_workspace.update(cx, |multi_workspace, cx| {
+ multi_workspace.toggle_sidebar(window, cx);
+ });
+ }
+ }),
+ )
+ .child(Divider::vertical().color(ui::DividerColor::Border))
+ }
}
impl StatusBar {
@@ -94,6 +126,8 @@ impl StatusBar {
this.update_active_pane_item(window, cx)
}),
workspace_sidebar_open: false,
+ sidebar_has_notifications: false,
+ show_sidebar_toggle: false,
};
this.update_active_pane_item(window, cx);
this
@@ -104,6 +138,16 @@ impl StatusBar {
cx.notify();
}
+ pub fn set_sidebar_has_notifications(&mut self, has: bool, cx: &mut Context<Self>) {
+ self.sidebar_has_notifications = has;
+ cx.notify();
+ }
+
+ pub fn set_show_sidebar_toggle(&mut self, show: bool, cx: &mut Context<Self>) {
+ self.show_sidebar_toggle = show;
+ cx.notify();
+ }
+
pub fn add_left_item<T>(&mut self, item: Entity<T>, window: &mut Window, cx: &mut Context<Self>)
where
T: 'static + StatusItemView,
@@ -6,11 +6,13 @@ use language::Buffer;
use project::{TaskSourceKind, WorktreeId};
use remote::ConnectionState;
use task::{
- DebugScenario, ResolvedTask, SharedTaskContext, SpawnInTerminal, TaskContext, TaskTemplate,
+ DebugScenario, ResolvedTask, SaveStrategy, SharedTaskContext, SpawnInTerminal, TaskContext,
+ TaskTemplate,
};
use ui::Window;
+use util::TryFutureExt;
-use crate::{Toast, Workspace, notifications::NotificationId};
+use crate::{SaveIntent, Toast, Workspace, notifications::NotificationId};
impl Workspace {
pub fn schedule_task(
@@ -73,28 +75,57 @@ impl Workspace {
});
}
- if let Some(terminal_provider) = self.terminal_provider.as_ref() {
- let task_status = terminal_provider.spawn(spawn_in_terminal, window, cx);
-
- let task = cx.spawn(async |w, cx| {
- let res = cx.background_spawn(task_status).await;
- match res {
- Some(Ok(status)) => {
- if status.success() {
- log::debug!("Task spawn succeeded");
- } else {
- log::debug!("Task spawn failed, code: {:?}", status.code());
- }
+ if self.terminal_provider.is_some() {
+ let task = cx.spawn_in(window, async move |workspace, cx| {
+ let save_action = match spawn_in_terminal.save {
+ SaveStrategy::All => {
+ let save_all = workspace.update_in(cx, |workspace, window, cx| {
+ let task = workspace.save_all_internal(SaveIntent::SaveAll, window, cx);
+ // Match the type of the other arm by ignoring the bool value returned
+ cx.background_spawn(async { task.await.map(|_| ()) })
+ });
+ save_all.ok()
}
- Some(Err(e)) => {
- log::error!("Task spawn failed: {e:#}");
- _ = w.update(cx, |w, cx| {
- let id = NotificationId::unique::<ResolvedTask>();
- w.show_toast(Toast::new(id, format!("Task spawn failed: {e}")), cx);
- })
+ SaveStrategy::Current => {
+ let save_current = workspace.update_in(cx, |workspace, window, cx| {
+ workspace.save_active_item(SaveIntent::SaveAll, window, cx)
+ });
+ save_current.ok()
}
- None => log::debug!("Task spawn got cancelled"),
+ SaveStrategy::None => None,
};
+ if let Some(save_action) = save_action {
+ save_action.log_err().await;
+ }
+
+ let spawn_task = workspace.update_in(cx, |workspace, window, cx| {
+ workspace
+ .terminal_provider
+ .as_ref()
+ .map(|terminal_provider| {
+ terminal_provider.spawn(spawn_in_terminal, window, cx)
+ })
+ });
+ if let Some(spawn_task) = spawn_task.ok().flatten() {
+ let res = cx.background_spawn(spawn_task).await;
+ match res {
+ Some(Ok(status)) => {
+ if status.success() {
+ log::debug!("Task spawn succeeded");
+ } else {
+ log::debug!("Task spawn failed, code: {:?}", status.code());
+ }
+ }
+ Some(Err(e)) => {
+ log::error!("Task spawn failed: {e:#}");
+ _ = workspace.update(cx, |w, cx| {
+ let id = NotificationId::unique::<ResolvedTask>();
+ w.show_toast(Toast::new(id, format!("Task spawn failed: {e}")), cx);
+ })
+ }
+ None => log::debug!("Task spawn got cancelled"),
+ };
+ }
});
self.scheduled_tasks.push(task);
}
@@ -134,3 +165,166 @@ impl Workspace {
}
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::{
+ TerminalProvider,
+ item::test::{TestItem, TestProjectItem},
+ register_serializable_item,
+ };
+ use gpui::{App, TestAppContext};
+ use parking_lot::Mutex;
+ use project::{FakeFs, Project, TaskSourceKind};
+ use serde_json::json;
+ use std::sync::Arc;
+ use task::TaskTemplate;
+
+ struct Fixture {
+ workspace: Entity<Workspace>,
+ item: Entity<TestItem>,
+ task: ResolvedTask,
+ dirty_before_spawn: Arc<Mutex<Option<bool>>>,
+ }
+
+ #[gpui::test]
+ async fn test_schedule_resolved_task_save_all(cx: &mut TestAppContext) {
+ let (fixture, cx) = create_fixture(cx, SaveStrategy::All).await;
+ fixture.workspace.update_in(cx, |workspace, window, cx| {
+ workspace.schedule_resolved_task(
+ TaskSourceKind::UserInput,
+ fixture.task,
+ false,
+ window,
+ cx,
+ );
+ });
+ cx.executor().run_until_parked();
+
+ assert_eq!(*fixture.dirty_before_spawn.lock(), Some(false));
+ assert!(cx.read(|cx| !fixture.item.read(cx).is_dirty));
+ }
+
+ #[gpui::test]
+ async fn test_schedule_resolved_task_save_current(cx: &mut TestAppContext) {
+ let (fixture, cx) = create_fixture(cx, SaveStrategy::Current).await;
+ // Add a second inactive dirty item
+ let inactive = add_test_item(&fixture.workspace, "file2.txt", false, cx);
+ fixture.workspace.update_in(cx, |workspace, window, cx| {
+ workspace.schedule_resolved_task(
+ TaskSourceKind::UserInput,
+ fixture.task,
+ false,
+ window,
+ cx,
+ );
+ });
+ cx.executor().run_until_parked();
+
+ // The active item (fixture.item) should be saved
+ assert_eq!(*fixture.dirty_before_spawn.lock(), Some(false));
+ assert!(cx.read(|cx| !fixture.item.read(cx).is_dirty));
+ // The inactive item should not be saved
+ assert!(cx.read(|cx| inactive.read(cx).is_dirty));
+ }
+
+ #[gpui::test]
+ async fn test_schedule_resolved_task_save_none(cx: &mut TestAppContext) {
+ let (fixture, cx) = create_fixture(cx, SaveStrategy::None).await;
+ fixture.workspace.update_in(cx, |workspace, window, cx| {
+ workspace.schedule_resolved_task(
+ TaskSourceKind::UserInput,
+ fixture.task,
+ false,
+ window,
+ cx,
+ );
+ });
+ cx.executor().run_until_parked();
+
+ assert_eq!(*fixture.dirty_before_spawn.lock(), Some(true));
+ assert!(cx.read(|cx| fixture.item.read(cx).is_dirty));
+ }
+
+ async fn create_fixture(
+ cx: &mut TestAppContext,
+ save_strategy: SaveStrategy,
+ ) -> (Fixture, &mut gpui::VisualTestContext) {
+ cx.update(|cx| {
+ let settings_store = settings::SettingsStore::test(cx);
+ cx.set_global(settings_store);
+ theme::init(theme::LoadThemes::JustBase, cx);
+ register_serializable_item::<TestItem>(cx);
+ });
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree("/root", json!({ "file.txt": "dirty" }))
+ .await;
+ let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
+ let (workspace, cx) =
+ cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
+
+ // Add a dirty item to the workspace
+ let item = add_test_item(&workspace, "file.txt", true, cx);
+
+ let template = TaskTemplate {
+ label: "test".to_string(),
+ command: "echo".to_string(),
+ save: save_strategy,
+ ..Default::default()
+ };
+ let task = template
+ .resolve_task("test", &task::TaskContext::default())
+ .unwrap();
+ let dirty_before_spawn: Arc<Mutex<Option<bool>>> = Arc::default();
+ let terminal_provider = Box::new(TestTerminalProvider {
+ item: item.clone(),
+ dirty_before_spawn: dirty_before_spawn.clone(),
+ });
+ workspace.update(cx, |workspace, _| {
+ workspace.terminal_provider = Some(terminal_provider);
+ });
+ let fixture = Fixture {
+ workspace,
+ item,
+ task,
+ dirty_before_spawn,
+ };
+ (fixture, cx)
+ }
+
+ fn add_test_item(
+ workspace: &Entity<Workspace>,
+ name: &str,
+ active: bool,
+ cx: &mut gpui::VisualTestContext,
+ ) -> Entity<TestItem> {
+ let item = cx.new(|cx| {
+ TestItem::new(cx)
+ .with_dirty(true)
+ .with_project_items(&[TestProjectItem::new(1, name, cx)])
+ });
+ workspace.update_in(cx, |workspace, window, cx| {
+ let pane = workspace.active_pane().clone();
+ workspace.add_item(pane, Box::new(item.clone()), None, true, active, window, cx);
+ });
+ item
+ }
+
+ struct TestTerminalProvider {
+ item: Entity<TestItem>,
+ dirty_before_spawn: Arc<Mutex<Option<bool>>>,
+ }
+
+ impl TerminalProvider for TestTerminalProvider {
+ fn spawn(
+ &self,
+ _task: task::SpawnInTerminal,
+ _window: &mut ui::Window,
+ cx: &mut App,
+ ) -> Task<Option<Result<ExitStatus>>> {
+ *self.dirty_before_spawn.lock() = Some(cx.read_entity(&self.item, |e, _| e.is_dirty));
+ Task::ready(Some(Ok(ExitStatus::default())))
+ }
+ }
+}
@@ -1,6 +1,7 @@
use crate::{
- NewFile, Open, PathList, SerializedWorkspaceLocation, WORKSPACE_DB, Workspace, WorkspaceId,
+ NewFile, Open, PathList, SerializedWorkspaceLocation, Workspace, WorkspaceId,
item::{Item, ItemEvent},
+ persistence::WorkspaceDb,
};
use chrono::{DateTime, Utc};
use git::Clone as GitClone;
@@ -271,9 +272,10 @@ impl WelcomePage {
let fs = workspace
.upgrade()
.map(|ws| ws.read(cx).app_state().fs.clone());
+ let db = WorkspaceDb::global(cx);
cx.spawn_in(window, async move |this: WeakEntity<Self>, cx| {
let Some(fs) = fs else { return };
- let workspaces = WORKSPACE_DB
+ let workspaces = db
.recent_workspaces_on_disk(fs.as_ref())
.await
.log_err()
@@ -518,7 +520,7 @@ impl crate::SerializableItem for WelcomePage {
alive_items,
workspace_id,
"welcome_pages",
- &persistence::WELCOME_PAGES,
+ &persistence::WelcomePagesDb::global(cx),
cx,
)
}
@@ -531,7 +533,7 @@ impl crate::SerializableItem for WelcomePage {
window: &mut Window,
cx: &mut App,
) -> Task<gpui::Result<Entity<Self>>> {
- if persistence::WELCOME_PAGES
+ if persistence::WelcomePagesDb::global(cx)
.get_welcome_page(item_id, workspace_id)
.ok()
.is_some_and(|is_open| is_open)
@@ -553,11 +555,10 @@ impl crate::SerializableItem for WelcomePage {
cx: &mut Context<Self>,
) -> Option<Task<gpui::Result<()>>> {
let workspace_id = workspace.database_id()?;
- Some(cx.background_spawn(async move {
- persistence::WELCOME_PAGES
- .save_welcome_page(item_id, workspace_id, true)
- .await
- }))
+ let db = persistence::WelcomePagesDb::global(cx);
+ Some(cx.background_spawn(
+ async move { db.save_welcome_page(item_id, workspace_id, true).await },
+ ))
}
fn should_serialize(&self, event: &Self::Event) -> bool {
@@ -591,7 +592,7 @@ mod persistence {
)]);
}
- db::static_connection!(WELCOME_PAGES, WelcomePagesDb, [WorkspaceDb]);
+ db::static_connection!(WelcomePagesDb, [WorkspaceDb]);
impl WelcomePagesDb {
query! {
@@ -27,8 +27,9 @@ mod workspace_settings;
pub use crate::notifications::NotificationFrame;
pub use dock::Panel;
pub use multi_workspace::{
- DraggedSidebar, FocusWorkspaceSidebar, MultiWorkspace, MultiWorkspaceEvent, Sidebar,
- SidebarHandle, ToggleWorkspaceSidebar,
+ CloseWorkspaceSidebar, DraggedSidebar, FocusWorkspaceSidebar, MultiWorkspace,
+ MultiWorkspaceEvent, NextWorkspace, PreviousWorkspace, Sidebar, SidebarHandle,
+ ToggleWorkspaceSidebar,
};
pub use path_list::{PathList, SerializedPathList};
pub use toast_layer::{ToastAction, ToastLayer, ToastView};
@@ -75,14 +76,14 @@ pub use pane_group::{
ActivePaneDecorator, HANDLE_HITBOX_SIZE, Member, PaneAxis, PaneGroup, PaneRenderContext,
SplitDirection,
};
-use persistence::{DB, SerializedWindowBounds, model::SerializedWorkspace};
+use persistence::{SerializedWindowBounds, model::SerializedWorkspace};
pub use persistence::{
- DB as WORKSPACE_DB, WorkspaceDb, delete_unloaded_items,
+ WorkspaceDb, delete_unloaded_items,
model::{
DockStructure, ItemId, SerializedMultiWorkspace, SerializedWorkspaceLocation,
SessionWorkspace,
},
- read_serialized_multi_workspaces,
+ read_serialized_multi_workspaces, resolve_worktree_workspaces,
};
use postage::stream::Stream;
use project::{
@@ -1340,6 +1341,7 @@ pub struct Workspace {
last_open_dock_positions: Vec<DockPosition>,
removing: bool,
_panels_task: Option<Task<Result<()>>>,
+ sidebar_focus_handle: Option<FocusHandle>,
}
impl EventEmitter<Event> for Workspace {}
@@ -1381,10 +1383,10 @@ impl Workspace {
|new_trusted_worktrees, cx| {
let timeout =
cx.background_executor().timer(SERIALIZATION_THROTTLE_TIME);
+ let db = WorkspaceDb::global(cx);
cx.background_spawn(async move {
timeout.await;
- persistence::DB
- .save_trusted_worktrees(new_trusted_worktrees)
+ db.save_trusted_worktrees(new_trusted_worktrees)
.await
.log_err();
})
@@ -1418,7 +1420,13 @@ impl Workspace {
this.collaborator_left(*peer_id, window, cx);
}
- &project::Event::WorktreeRemoved(id) | &project::Event::WorktreeAdded(id) => {
+ &project::Event::WorktreeRemoved(_) => {
+ this.update_window_title(window, cx);
+ this.serialize_workspace(window, cx);
+ this.update_history(cx);
+ }
+
+ &project::Event::WorktreeAdded(id) => {
this.update_window_title(window, cx);
if this
.project()
@@ -1745,6 +1753,7 @@ impl Workspace {
scheduled_tasks: Vec::new(),
last_open_dock_positions: Vec::new(),
removing: false,
+ sidebar_focus_handle: None,
}
}
@@ -1768,6 +1777,8 @@ impl Workspace {
cx,
);
+ let db = WorkspaceDb::global(cx);
+ let kvp = db::kvp::KeyValueStore::global(cx);
cx.spawn(async move |cx| {
let mut paths_to_open = Vec::with_capacity(abs_paths.len());
for path in abs_paths.into_iter() {
@@ -1778,8 +1789,7 @@ impl Workspace {
}
}
- let serialized_workspace =
- persistence::DB.workspace_for_roots(paths_to_open.as_slice());
+ let serialized_workspace = db.workspace_for_roots(paths_to_open.as_slice());
if let Some(paths) = serialized_workspace.as_ref().map(|ws| &ws.paths) {
paths_to_open = paths.ordered_paths().cloned().collect();
@@ -1811,10 +1821,10 @@ impl Workspace {
let workspace_id = if let Some(serialized_workspace) = serialized_workspace.as_ref() {
serialized_workspace.id
} else {
- DB.next_id().await.unwrap_or_else(|_| Default::default())
+ db.next_id().await.unwrap_or_else(|_| Default::default())
};
- let toolchains = DB.toolchains(workspace_id).await?;
+ let toolchains = db.toolchains(workspace_id).await?;
for (toolchain, worktree_path, path) in toolchains {
let toolchain_path = PathBuf::from(toolchain.path.clone().to_string());
@@ -1897,7 +1907,7 @@ impl Workspace {
// Reopening an existing workspace - restore its saved bounds
(Some(bounds.0), Some(display))
} else if let Some((display, bounds)) =
- persistence::read_default_window_bounds()
+ persistence::read_default_window_bounds(&kvp)
{
// New or empty workspace - use the last known window bounds
(Some(bounds), Some(display))
@@ -1968,7 +1978,7 @@ impl Workspace {
// 1. This is an empty workspace (no paths), AND
// 2. The serialized workspace either doesn't exist or has no paths
if is_empty_workspace && !serialized_workspace_has_paths {
- if let Some(default_docks) = persistence::read_default_dock_state() {
+ if let Some(default_docks) = persistence::read_default_dock_state(&kvp) {
window
.update(cx, |_, window, cx| {
workspace.update(cx, |workspace, cx| {
@@ -2157,12 +2167,24 @@ impl Workspace {
&self.status_bar
}
- pub fn set_workspace_sidebar_open(&self, open: bool, cx: &mut App) {
+ pub fn set_workspace_sidebar_open(
+ &self,
+ open: bool,
+ has_notifications: bool,
+ show_toggle: bool,
+ cx: &mut App,
+ ) {
self.status_bar.update(cx, |status_bar, cx| {
status_bar.set_workspace_sidebar_open(open, cx);
+ status_bar.set_sidebar_has_notifications(has_notifications, cx);
+ status_bar.set_show_sidebar_toggle(show_toggle, cx);
});
}
+ pub fn set_sidebar_focus_handle(&mut self, handle: Option<FocusHandle>) {
+ self.sidebar_focus_handle = handle;
+ }
+
pub fn status_bar_visible(&self, cx: &App) -> bool {
StatusBarSettings::get_global(cx).show
}
@@ -3351,7 +3373,7 @@ impl Workspace {
.map(|wt| wt.read(cx).abs_path().as_ref().to_path_buf())
}
- fn add_folder_to_project(
+ pub fn add_folder_to_project(
&mut self,
_: &AddFolderToProject,
window: &mut Window,
@@ -4481,26 +4503,35 @@ impl Workspace {
) {
use ActivateInDirectionTarget as Target;
enum Origin {
+ Sidebar,
LeftDock,
RightDock,
BottomDock,
Center,
}
- let origin: Origin = [
- (&self.left_dock, Origin::LeftDock),
- (&self.right_dock, Origin::RightDock),
- (&self.bottom_dock, Origin::BottomDock),
- ]
- .into_iter()
- .find_map(|(dock, origin)| {
- if dock.focus_handle(cx).contains_focused(window, cx) && dock.read(cx).is_open() {
- Some(origin)
- } else {
- None
- }
- })
- .unwrap_or(Origin::Center);
+ let origin: Origin = if self
+ .sidebar_focus_handle
+ .as_ref()
+ .is_some_and(|h| h.contains_focused(window, cx))
+ {
+ Origin::Sidebar
+ } else {
+ [
+ (&self.left_dock, Origin::LeftDock),
+ (&self.right_dock, Origin::RightDock),
+ (&self.bottom_dock, Origin::BottomDock),
+ ]
+ .into_iter()
+ .find_map(|(dock, origin)| {
+ if dock.focus_handle(cx).contains_focused(window, cx) && dock.read(cx).is_open() {
+ Some(origin)
+ } else {
+ None
+ }
+ })
+ .unwrap_or(Origin::Center)
+ };
let get_last_active_pane = || {
let pane = self
@@ -4519,7 +4550,20 @@ impl Workspace {
let try_dock =
|dock: &Entity<Dock>| dock.read(cx).is_open().then(|| Target::Dock(dock.clone()));
+ let sidebar_target = self
+ .sidebar_focus_handle
+ .as_ref()
+ .map(|h| Target::Sidebar(h.clone()));
+
let target = match (origin, direction) {
+ // From the sidebar, only Right navigates into the workspace.
+ (Origin::Sidebar, SplitDirection::Right) => try_dock(&self.left_dock)
+ .or_else(|| get_last_active_pane().map(Target::Pane))
+ .or_else(|| try_dock(&self.bottom_dock))
+ .or_else(|| try_dock(&self.right_dock)),
+
+ (Origin::Sidebar, _) => None,
+
// We're in the center, so we first try to go to a different pane,
// otherwise try to go to a dock.
(Origin::Center, direction) => {
@@ -4529,7 +4573,7 @@ impl Workspace {
match direction {
SplitDirection::Up => None,
SplitDirection::Down => try_dock(&self.bottom_dock),
- SplitDirection::Left => try_dock(&self.left_dock),
+ SplitDirection::Left => try_dock(&self.left_dock).or(sidebar_target),
SplitDirection::Right => try_dock(&self.right_dock),
}
}
@@ -4543,18 +4587,24 @@ impl Workspace {
}
}
+ (Origin::LeftDock, SplitDirection::Left) => sidebar_target,
+
(Origin::LeftDock, SplitDirection::Down)
| (Origin::RightDock, SplitDirection::Down) => try_dock(&self.bottom_dock),
(Origin::BottomDock, SplitDirection::Up) => get_last_active_pane().map(Target::Pane),
- (Origin::BottomDock, SplitDirection::Left) => try_dock(&self.left_dock),
+ (Origin::BottomDock, SplitDirection::Left) => {
+ try_dock(&self.left_dock).or(sidebar_target)
+ }
(Origin::BottomDock, SplitDirection::Right) => try_dock(&self.right_dock),
(Origin::RightDock, SplitDirection::Left) => {
if let Some(last_active_pane) = get_last_active_pane() {
Some(Target::Pane(last_active_pane))
} else {
- try_dock(&self.bottom_dock).or_else(|| try_dock(&self.left_dock))
+ try_dock(&self.bottom_dock)
+ .or_else(|| try_dock(&self.left_dock))
+ .or(sidebar_target)
}
}
@@ -4583,6 +4633,9 @@ impl Workspace {
}
})
}
+ Some(ActivateInDirectionTarget::Sidebar(focus_handle)) => {
+ focus_handle.focus(window, cx);
+ }
None => {}
}
}
@@ -5942,7 +5995,8 @@ impl Workspace {
self.update_active_view_for_followers(window, cx);
if let Some(database_id) = self.database_id {
- cx.background_spawn(persistence::DB.update_timestamp(database_id))
+ let db = WorkspaceDb::global(cx);
+ cx.background_spawn(async move { db.update_timestamp(database_id).await })
.detach();
}
} else {
@@ -6011,15 +6065,17 @@ impl Workspace {
let window_bounds = window.inner_window_bounds();
let database_id = self.database_id;
let has_paths = !self.root_paths(cx).is_empty();
+ let db = WorkspaceDb::global(cx);
+ let kvp = db::kvp::KeyValueStore::global(cx);
cx.background_executor().spawn(async move {
if !has_paths {
- persistence::write_default_window_bounds(window_bounds, display_uuid)
+ persistence::write_default_window_bounds(&kvp, window_bounds, display_uuid)
.await
.log_err();
}
if let Some(database_id) = database_id {
- DB.set_window_open_status(
+ db.set_window_open_status(
database_id,
SerializedWindowBounds(window_bounds),
display_uuid,
@@ -6027,7 +6083,7 @@ impl Workspace {
.await
.log_err();
} else {
- persistence::write_default_window_bounds(window_bounds, display_uuid)
+ persistence::write_default_window_bounds(&kvp, window_bounds, display_uuid)
.await
.log_err();
}
@@ -6216,8 +6272,9 @@ impl Workspace {
user_toolchains,
};
+ let db = WorkspaceDb::global(cx);
window.spawn(cx, async move |_| {
- persistence::DB.save_workspace(serialized_workspace).await;
+ db.save_workspace(serialized_workspace).await;
})
}
WorkspaceLocation::DetachFromSession => {
@@ -6225,27 +6282,30 @@ impl Workspace {
let display = window.display(cx).and_then(|d| d.uuid().ok());
// Save dock state for empty local workspaces
let docks = build_serialized_docks(self, window, cx);
+ let db = WorkspaceDb::global(cx);
+ let kvp = db::kvp::KeyValueStore::global(cx);
window.spawn(cx, async move |_| {
- persistence::DB
- .set_window_open_status(
- database_id,
- window_bounds,
- display.unwrap_or_default(),
- )
- .await
- .log_err();
- persistence::DB
- .set_session_id(database_id, None)
+ db.set_window_open_status(
+ database_id,
+ window_bounds,
+ display.unwrap_or_default(),
+ )
+ .await
+ .log_err();
+ db.set_session_id(database_id, None).await.log_err();
+ persistence::write_default_dock_state(&kvp, docks)
.await
.log_err();
- persistence::write_default_dock_state(docks).await.log_err();
})
}
WorkspaceLocation::None => {
// Save dock state for empty non-local workspaces
let docks = build_serialized_docks(self, window, cx);
+ let kvp = db::kvp::KeyValueStore::global(cx);
window.spawn(cx, async move |_| {
- persistence::write_default_dock_state(docks).await.log_err();
+ persistence::write_default_dock_state(&kvp, docks)
+ .await
+ .log_err();
})
}
}
@@ -6675,9 +6735,9 @@ impl Workspace {
trusted_worktrees.update(cx, |trusted_worktrees, _| {
trusted_worktrees.clear_trusted_paths()
});
- let clear_task = persistence::DB.clear_trusted_worktrees();
+ let db = WorkspaceDb::global(cx);
cx.spawn(async move |_, cx| {
- if clear_task.await.log_err().is_some() {
+ if db.clear_trusted_worktrees().await.log_err().is_some() {
cx.update(|cx| reload(cx));
}
})
@@ -6945,6 +7005,12 @@ impl Workspace {
self.modal_layer.read(cx).has_active_modal()
}
+ pub fn is_active_modal_command_palette(&self, cx: &mut App) -> bool {
+ self.modal_layer
+ .read(cx)
+ .is_active_modal_command_palette(cx)
+ }
+
pub fn active_modal<V: ManagedView + 'static>(&self, cx: &App) -> Option<Entity<V>> {
self.modal_layer.read(cx).active_modal()
}
@@ -6983,8 +7049,12 @@ impl Workspace {
) {
self.centered_layout = !self.centered_layout;
if let Some(database_id) = self.database_id() {
- cx.background_spawn(DB.set_centered_layout(database_id, self.centered_layout))
- .detach_and_log_err(cx);
+ let db = WorkspaceDb::global(cx);
+ let centered_layout = self.centered_layout;
+ cx.background_spawn(async move {
+ db.set_centered_layout(database_id, centered_layout).await
+ })
+ .detach_and_log_err(cx);
}
cx.notify();
}
@@ -7488,9 +7558,11 @@ fn open_items(
})
}
+#[derive(Clone)]
enum ActivateInDirectionTarget {
Pane(Entity<Pane>),
Dock(Entity<Dock>),
+ Sidebar(FocusHandle),
}
fn notify_if_database_failed(window: WindowHandle<MultiWorkspace>, cx: &mut AsyncApp) {
@@ -8199,9 +8271,10 @@ impl WorkspaceHandle for Entity<Workspace> {
}
pub async fn last_opened_workspace_location(
+ db: &WorkspaceDb,
fs: &dyn fs::Fs,
) -> Option<(WorkspaceId, SerializedWorkspaceLocation, PathList)> {
- DB.last_workspace(fs)
+ db.last_workspace(fs)
.await
.log_err()
.flatten()
@@ -8209,11 +8282,12 @@ pub async fn last_opened_workspace_location(
}
pub async fn last_session_workspace_locations(
+ db: &WorkspaceDb,
last_session_id: &str,
last_session_window_stack: Option<Vec<WindowId>>,
fs: &dyn fs::Fs,
) -> Option<Vec<SessionWorkspace>> {
- DB.last_session_workspace_locations(last_session_id, last_session_window_stack, fs)
+ db.last_session_workspace_locations(last_session_id, last_session_window_stack, fs)
.await
.log_err()
}
@@ -8835,8 +8909,10 @@ pub fn open_workspace_by_id(
cx,
);
+ let db = WorkspaceDb::global(cx);
+ let kvp = db::kvp::KeyValueStore::global(cx);
cx.spawn(async move |cx| {
- let serialized_workspace = persistence::DB
+ let serialized_workspace = db
.workspace_for_id(workspace_id)
.with_context(|| format!("Workspace {workspace_id:?} not found"))?;
@@ -8868,7 +8944,7 @@ pub fn open_workspace_by_id(
&& let Some(bounds) = serialized_workspace.window_bounds.as_ref()
{
(Some(bounds.0), Some(display))
- } else if let Some((display, bounds)) = persistence::read_default_window_bounds() {
+ } else if let Some((display, bounds)) = persistence::read_default_window_bounds(&kvp) {
(Some(bounds), Some(display))
} else {
(None, None)
@@ -9236,7 +9312,8 @@ async fn open_remote_project_inner(
window: WindowHandle<MultiWorkspace>,
cx: &mut AsyncApp,
) -> Result<Vec<Option<Box<dyn ItemHandle>>>> {
- let toolchains = DB.toolchains(workspace_id).await?;
+ let db = cx.update(|cx| WorkspaceDb::global(cx));
+ let toolchains = db.toolchains(workspace_id).await?;
for (toolchain, worktree_path, path) in toolchains {
project
.update(cx, |this, cx| {
@@ -9326,20 +9403,20 @@ fn deserialize_remote_project(
paths: Vec<PathBuf>,
cx: &AsyncApp,
) -> Task<Result<(WorkspaceId, Option<SerializedWorkspace>)>> {
+ let db = cx.update(|cx| WorkspaceDb::global(cx));
cx.background_spawn(async move {
- let remote_connection_id = persistence::DB
+ let remote_connection_id = db
.get_or_create_remote_connection(connection_options)
.await?;
- let serialized_workspace =
- persistence::DB.remote_workspace_for_roots(&paths, remote_connection_id);
+ let serialized_workspace = db.remote_workspace_for_roots(&paths, remote_connection_id);
let workspace_id = if let Some(workspace_id) =
serialized_workspace.as_ref().map(|workspace| workspace.id)
{
workspace_id
} else {
- persistence::DB.next_id().await?
+ db.next_id().await?
};
Ok((workspace_id, serialized_workspace))
@@ -9958,14 +10035,15 @@ pub fn remote_workspace_position_from_db(
cx: &App,
) -> Task<Result<WorkspacePosition>> {
let paths = paths_to_open.to_vec();
+ let db = WorkspaceDb::global(cx);
+ let kvp = db::kvp::KeyValueStore::global(cx);
cx.background_spawn(async move {
- let remote_connection_id = persistence::DB
+ let remote_connection_id = db
.get_or_create_remote_connection(connection_options)
.await
.context("fetching serialized ssh project")?;
- let serialized_workspace =
- persistence::DB.remote_workspace_for_roots(&paths, remote_connection_id);
+ let serialized_workspace = db.remote_workspace_for_roots(&paths, remote_connection_id);
let (window_bounds, display) = if let Some(bounds) = window_bounds_env_override() {
(Some(WindowBounds::Windowed(bounds)), None)
@@ -9975,7 +10053,7 @@ pub fn remote_workspace_position_from_db(
.and_then(|workspace| {
Some((workspace.display?, workspace.window_bounds.map(|b| b.0)?))
})
- .or_else(|| persistence::read_default_window_bounds());
+ .or_else(|| persistence::read_default_window_bounds(&kvp));
if let Some((serialized_display, serialized_bounds)) = restorable_bounds {
(Some(serialized_bounds), Some(serialized_display))
@@ -11002,6 +11080,7 @@ mod tests {
assert!(workspace.right_dock().read(cx).is_open());
assert!(!panel.is_zoomed(window, cx));
assert!(!panel.read(cx).focus_handle(cx).contains_focused(window, cx));
+ assert!(pane.read(cx).focus_handle(cx).contains_focused(window, cx));
});
// Close the dock
@@ -11013,6 +11092,7 @@ mod tests {
assert!(!workspace.right_dock().read(cx).is_open());
assert!(!panel.is_zoomed(window, cx));
assert!(!panel.read(cx).focus_handle(cx).contains_focused(window, cx));
+ assert!(pane.read(cx).focus_handle(cx).contains_focused(window, cx));
});
// Open the dock
@@ -13605,6 +13685,7 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
+ cx.set_global(db::AppDatabase::test_new());
theme::init(theme::LoadThemes::JustBase, cx);
});
}
@@ -2,7 +2,7 @@
description = "The fast, collaborative code editor."
edition.workspace = true
name = "zed"
-version = "0.229.0"
+version = "0.230.0"
publish.workspace = true
license = "GPL-3.0-or-later"
authors = ["Zed Team <hi@zed.dev>"]
@@ -7,12 +7,14 @@ fn main() {
// Add rpaths for libraries that webrtc-sys dlopens at runtime.
// This is mostly required for hosts with non-standard SO installation
// locations such as NixOS.
- let dlopened_libs = ["libva", "libva-drm"];
+ let dlopened_libs = ["libva", "libva-drm", "egl"];
let mut rpath_dirs = std::collections::BTreeSet::new();
for lib in &dlopened_libs {
if let Some(libdir) = pkg_config::get_variable(lib, "libdir").ok() {
rpath_dirs.insert(libdir);
+ } else {
+ eprintln!("zed build.rs: {lib} not found in pkg-config's path");
}
}
@@ -14,7 +14,7 @@ use client::{Client, ProxySettings, UserStore, parse_zed_link};
use collab_ui::channel_view::ChannelView;
use collections::HashMap;
use crashes::InitCrashHandler;
-use db::kvp::{GLOBAL_KEY_VALUE_STORE, KEY_VALUE_STORE};
+use db::kvp::{GlobalKeyValueStore, KeyValueStore};
use editor::Editor;
use extension::ExtensionHostProxy;
use fs::{Fs, RealFs};
@@ -325,12 +325,16 @@ fn main() {
let app =
Application::with_platform(gpui_platform::current_platform(false)).with_assets(Assets);
+ let app_db = db::AppDatabase::new();
let system_id = app.background_executor().spawn(system_id());
- let installation_id = app.background_executor().spawn(installation_id());
- let session_id = Uuid::new_v4().to_string();
- let session = app
+ let installation_id = app
.background_executor()
- .spawn(Session::new(session_id.clone()));
+ .spawn(installation_id(KeyValueStore::from_app_db(&app_db)));
+ let session_id = Uuid::new_v4().to_string();
+ let session = app.background_executor().spawn(Session::new(
+ session_id.clone(),
+ KeyValueStore::from_app_db(&app_db),
+ ));
crashes::init(
InitCrashHandler {
@@ -451,7 +455,8 @@ fn main() {
});
app.run(move |cx| {
- let db_trusted_paths = match workspace::WORKSPACE_DB.fetch_trusted_worktrees() {
+ cx.set_global(app_db);
+ let db_trusted_paths = match workspace::WorkspaceDb::global(cx).fetch_trusted_worktrees() {
Ok(trusted_paths) => trusted_paths,
Err(e) => {
log::error!("Failed to do initial trusted worktrees fetch: {e:#}");
@@ -1300,42 +1305,37 @@ async fn authenticate(client: Arc<Client>, cx: &AsyncApp) -> Result<()> {
async fn system_id() -> Result<IdType> {
let key_name = "system_id".to_string();
+ let db = GlobalKeyValueStore::global();
- if let Ok(Some(system_id)) = GLOBAL_KEY_VALUE_STORE.read_kvp(&key_name) {
+ if let Ok(Some(system_id)) = db.read_kvp(&key_name) {
return Ok(IdType::Existing(system_id));
}
let system_id = Uuid::new_v4().to_string();
- GLOBAL_KEY_VALUE_STORE
- .write_kvp(key_name, system_id.clone())
- .await?;
+ db.write_kvp(key_name, system_id.clone()).await?;
Ok(IdType::New(system_id))
}
-async fn installation_id() -> Result<IdType> {
+async fn installation_id(db: KeyValueStore) -> Result<IdType> {
let legacy_key_name = "device_id".to_string();
let key_name = "installation_id".to_string();
// Migrate legacy key to new key
- if let Ok(Some(installation_id)) = KEY_VALUE_STORE.read_kvp(&legacy_key_name) {
- KEY_VALUE_STORE
- .write_kvp(key_name, installation_id.clone())
- .await?;
- KEY_VALUE_STORE.delete_kvp(legacy_key_name).await?;
+ if let Ok(Some(installation_id)) = db.read_kvp(&legacy_key_name) {
+ db.write_kvp(key_name, installation_id.clone()).await?;
+ db.delete_kvp(legacy_key_name).await?;
return Ok(IdType::Existing(installation_id));
}
- if let Ok(Some(installation_id)) = KEY_VALUE_STORE.read_kvp(&key_name) {
+ if let Ok(Some(installation_id)) = db.read_kvp(&key_name) {
return Ok(IdType::Existing(installation_id));
}
let installation_id = Uuid::new_v4().to_string();
- KEY_VALUE_STORE
- .write_kvp(key_name, installation_id.clone())
- .await?;
+ db.write_kvp(key_name, installation_id.clone()).await?;
Ok(IdType::New(installation_id))
}
@@ -1344,6 +1344,7 @@ pub(crate) async fn restore_or_create_workspace(
app_state: Arc<AppState>,
cx: &mut AsyncApp,
) -> Result<()> {
+ let kvp = cx.update(|cx| KeyValueStore::global(cx));
if let Some((multi_workspaces, remote_workspaces)) = restorable_workspaces(cx, &app_state).await
{
let mut results: Vec<Result<(), Error>> = Vec::new();
@@ -1452,7 +1453,7 @@ pub(crate) async fn restore_or_create_workspace(
.await?;
}
}
- } else if matches!(KEY_VALUE_STORE.read_kvp(FIRST_OPEN), Ok(None)) {
+ } else if matches!(kvp.read_kvp(FIRST_OPEN), Ok(None)) {
cx.update(|cx| show_onboarding_view(app_state, cx)).await?;
} else {
cx.update(|cx| {
@@ -1488,7 +1489,8 @@ async fn restorable_workspaces(
let (remote_workspaces, local_workspaces) = locations
.into_iter()
.partition(|sw| matches!(sw.location, SerializedWorkspaceLocation::Remote(_)));
- let multi_workspaces = workspace::read_serialized_multi_workspaces(local_workspaces);
+ let multi_workspaces =
+ cx.update(|cx| workspace::read_serialized_multi_workspaces(local_workspaces, cx));
Some((multi_workspaces, remote_workspaces))
}
@@ -1496,7 +1498,12 @@ pub(crate) async fn restorable_workspace_locations(
cx: &mut AsyncApp,
app_state: &Arc<AppState>,
) -> Option<Vec<SessionWorkspace>> {
- let mut restore_behavior = cx.update(|cx| WorkspaceSettings::get(None, cx).restore_on_startup);
+ let (mut restore_behavior, db) = cx.update(|cx| {
+ (
+ WorkspaceSettings::get(None, cx).restore_on_startup,
+ workspace::WorkspaceDb::global(cx),
+ )
+ });
let session_handle = app_state.session.clone();
let (last_session_id, last_session_window_stack) = cx.update(|cx| {
@@ -1519,7 +1526,7 @@ pub(crate) async fn restorable_workspace_locations(
match restore_behavior {
workspace::RestoreOnStartupBehavior::LastWorkspace => {
- workspace::last_opened_workspace_location(app_state.fs.as_ref())
+ workspace::last_opened_workspace_location(&db, app_state.fs.as_ref())
.await
.map(|(workspace_id, location, paths)| {
vec![SessionWorkspace {
@@ -1535,6 +1542,7 @@ pub(crate) async fn restorable_workspace_locations(
let ordered = last_session_window_stack.is_some();
let mut locations = workspace::last_session_workspace_locations(
+ &db,
&last_session_id,
last_session_window_stack,
app_state.fs.as_ref(),
@@ -103,11 +103,11 @@ use {
feature_flags::FeatureFlagAppExt as _,
git_ui::project_diff::ProjectDiff,
gpui::{
- App, AppContext as _, Bounds, KeyBinding, Modifiers, VisualTestAppContext, WindowBounds,
- WindowHandle, WindowOptions, point, px, size,
+ App, AppContext as _, Bounds, Entity, KeyBinding, Modifiers, VisualTestAppContext,
+ WindowBounds, WindowHandle, WindowOptions, point, px, size,
},
image::RgbaImage,
- project::AgentId,
+ project::{AgentId, Project},
project_panel::ProjectPanel,
settings::{NotifyWhenAgentWaiting, Settings as _},
settings_ui::SettingsWindow,
@@ -1966,6 +1966,7 @@ impl AgentServer for StubAgentServer {
fn connect(
&self,
_delegate: AgentServerDelegate,
+ _project: Entity<Project>,
_cx: &mut App,
) -> gpui::Task<gpui::Result<Rc<dyn AgentConnection>>> {
gpui::Task::ready(Ok(Rc::new(self.connection.clone())))
@@ -2659,8 +2660,8 @@ fn run_multi_workspace_sidebar_visual_tests(
.context("Failed to create sidebar")?;
multi_workspace_window
- .update(cx, |multi_workspace, _window, _cx| {
- multi_workspace.register_sidebar(sidebar.clone());
+ .update(cx, |multi_workspace, _window, cx| {
+ multi_workspace.register_sidebar(sidebar.clone(), cx);
})
.context("Failed to register sidebar")?;
@@ -3191,8 +3192,8 @@ edition = "2021"
.context("Failed to create sidebar")?;
workspace_window
- .update(cx, |multi_workspace, _window, _cx| {
- multi_workspace.register_sidebar(sidebar.clone());
+ .update(cx, |multi_workspace, _window, cx| {
+ multi_workspace.register_sidebar(sidebar.clone(), cx);
})
.context("Failed to register sidebar")?;
@@ -397,8 +397,8 @@ pub fn initialize_workspace(
.update(cx, |_, window, cx| {
let sidebar =
cx.new(|cx| Sidebar::new(multi_workspace_handle.clone(), window, cx));
- multi_workspace_handle.update(cx, |multi_workspace, _cx| {
- multi_workspace.register_sidebar(sidebar);
+ multi_workspace_handle.update(cx, |multi_workspace, cx| {
+ multi_workspace.register_sidebar(sidebar, cx);
});
})
.ok();
@@ -5960,9 +5960,11 @@ mod tests {
cx.run_until_parked();
// Verify all workspaces retained their session_ids.
- let locations = workspace::last_session_workspace_locations(&session_id, None, fs.as_ref())
- .await
- .expect("expected session workspace locations");
+ let db = cx.update(|cx| workspace::WorkspaceDb::global(cx));
+ let locations =
+ workspace::last_session_workspace_locations(&db, &session_id, None, fs.as_ref())
+ .await
+ .expect("expected session workspace locations");
assert_eq!(
locations.len(),
3,
@@ -5989,9 +5991,10 @@ mod tests {
});
// --- Read back from DB and verify grouping ---
- let locations = workspace::last_session_workspace_locations(&session_id, None, fs.as_ref())
- .await
- .expect("expected session workspace locations");
+ let locations =
+ workspace::last_session_workspace_locations(&db, &session_id, None, fs.as_ref())
+ .await
+ .expect("expected session workspace locations");
assert_eq!(locations.len(), 3, "expected 3 session workspaces");
@@ -31,6 +31,7 @@ pub fn app_menus(cx: &mut App) -> Vec<Menu> {
MenuItem::action("Toggle All Docks", workspace::ToggleAllDocks),
MenuItem::submenu(Menu {
name: "Editor Layout".into(),
+ disabled: false,
items: vec![
MenuItem::action("Split Up", workspace::SplitUp::default()),
MenuItem::action("Split Down", workspace::SplitDown::default()),
@@ -60,39 +61,31 @@ pub fn app_menus(cx: &mut App) -> Vec<Menu> {
vec![
Menu {
name: "Zed".into(),
+ disabled: false,
items: vec![
MenuItem::action("About Zed", zed_actions::About),
MenuItem::action("Check for Updates", auto_update::Check),
MenuItem::separator(),
- MenuItem::submenu(Menu {
- name: "Settings".into(),
- items: vec![
- MenuItem::action("Open Settings", zed_actions::OpenSettings),
- MenuItem::action("Open Settings File", super::OpenSettingsFile),
- MenuItem::action("Open Project Settings", zed_actions::OpenProjectSettings),
- MenuItem::action(
- "Open Project Settings File",
- super::OpenProjectSettingsFile,
- ),
- MenuItem::action("Open Default Settings", super::OpenDefaultSettings),
- MenuItem::separator(),
- MenuItem::action("Open Keymap", zed_actions::OpenKeymap),
- MenuItem::action("Open Keymap File", zed_actions::OpenKeymapFile),
- MenuItem::action(
- "Open Default Key Bindings",
- zed_actions::OpenDefaultKeymap,
- ),
- MenuItem::separator(),
- MenuItem::action(
- "Select Theme...",
- zed_actions::theme_selector::Toggle::default(),
- ),
- MenuItem::action(
- "Select Icon Theme...",
- zed_actions::icon_theme_selector::Toggle::default(),
- ),
- ],
- }),
+ MenuItem::submenu(Menu::new("Settings").items([
+ MenuItem::action("Open Settings", zed_actions::OpenSettings),
+ MenuItem::action("Open Settings File", super::OpenSettingsFile),
+ MenuItem::action("Open Project Settings", zed_actions::OpenProjectSettings),
+ MenuItem::action("Open Project Settings File", super::OpenProjectSettingsFile),
+ MenuItem::action("Open Default Settings", super::OpenDefaultSettings),
+ MenuItem::separator(),
+ MenuItem::action("Open Keymap", zed_actions::OpenKeymap),
+ MenuItem::action("Open Keymap File", zed_actions::OpenKeymapFile),
+ MenuItem::action("Open Default Key Bindings", zed_actions::OpenDefaultKeymap),
+ MenuItem::separator(),
+ MenuItem::action(
+ "Select Theme...",
+ zed_actions::theme_selector::Toggle::default(),
+ ),
+ MenuItem::action(
+ "Select Icon Theme...",
+ zed_actions::icon_theme_selector::Toggle::default(),
+ ),
+ ])),
MenuItem::separator(),
#[cfg(target_os = "macos")]
MenuItem::os_submenu("Services", gpui::SystemMenuType::Services),
@@ -113,6 +106,7 @@ pub fn app_menus(cx: &mut App) -> Vec<Menu> {
},
Menu {
name: "File".into(),
+ disabled: false,
items: vec![
MenuItem::action("New", workspace::NewFile),
MenuItem::action("New Window", workspace::NewWindow),
@@ -160,6 +154,7 @@ pub fn app_menus(cx: &mut App) -> Vec<Menu> {
},
Menu {
name: "Edit".into(),
+ disabled: false,
items: vec![
MenuItem::os_action("Undo", editor::actions::Undo, OsAction::Undo),
MenuItem::os_action("Redo", editor::actions::Redo, OsAction::Redo),
@@ -180,6 +175,7 @@ pub fn app_menus(cx: &mut App) -> Vec<Menu> {
},
Menu {
name: "Selection".into(),
+ disabled: false,
items: vec![
MenuItem::os_action(
"Select All",
@@ -227,10 +223,12 @@ pub fn app_menus(cx: &mut App) -> Vec<Menu> {
},
Menu {
name: "View".into(),
+ disabled: false,
items: view_items,
},
Menu {
name: "Go".into(),
+ disabled: false,
items: vec![
MenuItem::action("Back", workspace::GoBack),
MenuItem::action("Forward", workspace::GoForward),
@@ -262,6 +260,7 @@ pub fn app_menus(cx: &mut App) -> Vec<Menu> {
},
Menu {
name: "Run".into(),
+ disabled: false,
items: vec![
MenuItem::action(
"Spawn Task",
@@ -286,6 +285,7 @@ pub fn app_menus(cx: &mut App) -> Vec<Menu> {
},
Menu {
name: "Window".into(),
+ disabled: false,
items: vec![
MenuItem::action("Minimize", super::Minimize),
MenuItem::action("Zoom", super::Zoom),
@@ -294,6 +294,7 @@ pub fn app_menus(cx: &mut App) -> Vec<Menu> {
},
Menu {
name: "Help".into(),
+ disabled: false,
items: vec![
MenuItem::action(
"View Release Notes Locally",
@@ -5,7 +5,7 @@ use anyhow::{Context as _, Result, anyhow};
use cli::{CliRequest, CliResponse, ipc::IpcSender};
use cli::{IpcHandshake, ipc};
use client::{ZedLink, parse_zed_link};
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
use editor::Editor;
use fs::Fs;
use futures::channel::mpsc::{UnboundedReceiver, UnboundedSender};
@@ -491,7 +491,8 @@ async fn open_workspaces(
if grouped_locations.is_empty() {
// If we have no paths to open, show the welcome screen if this is the first launch
- if matches!(KEY_VALUE_STORE.read_kvp(FIRST_OPEN), Ok(None)) {
+ let kvp = cx.update(|cx| KeyValueStore::global(cx));
+ if matches!(kvp.read_kvp(FIRST_OPEN), Ok(None)) {
cx.update(|cx| show_onboarding_view(app_state, cx).detach());
}
// If not the first launch, show an empty window with empty editor
@@ -110,6 +110,12 @@ pub struct Extensions {
#[serde(deny_unknown_fields)]
pub struct AcpRegistry;
+/// Show call diagnostics and connection quality statistics.
+#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)]
+#[action(namespace = collab)]
+#[serde(deny_unknown_fields)]
+pub struct ShowCallStats;
+
/// Decreases the font size in the editor buffer.
#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)]
#[action(namespace = zed)]
@@ -191,6 +197,8 @@ pub mod editor {
MoveUp,
/// Moves cursor down.
MoveDown,
+ /// Reveals the current file in the system file manager.
+ RevealInFileManager,
]
);
}
@@ -770,6 +778,18 @@ pub mod preview {
}
}
+pub mod agents_sidebar {
+ use gpui::actions;
+
+ actions!(
+ agents_sidebar,
+ [
+ /// Moves focus to the sidebar's search/filter editor.
+ FocusSidebarFilter,
+ ]
+ );
+}
+
pub mod notebook {
use gpui::actions;
@@ -2,78 +2,128 @@ use anyhow::{Context as _, Result, anyhow};
pub const MARKER_TAG_PREFIX: &str = "<|marker_";
pub const MARKER_TAG_SUFFIX: &str = "|>";
-const MIN_BLOCK_LINES: usize = 3;
-const MAX_BLOCK_LINES: usize = 8;
+pub const RELATIVE_MARKER_TAG_PREFIX: &str = "<|marker";
+const V0316_MIN_BLOCK_LINES: usize = 3;
+const V0316_MAX_BLOCK_LINES: usize = 8;
+const V0318_MIN_BLOCK_LINES: usize = 6;
+const V0318_MAX_BLOCK_LINES: usize = 16;
+const MAX_NUDGE_LINES: usize = 5;
+pub const V0316_END_MARKER: &str = "<[end▁of▁sentence]>";
+pub const V0317_END_MARKER: &str = "<[end▁of▁sentence]>";
+pub const V0318_END_MARKER: &str = "<[end▁of▁sentence]>";
pub fn marker_tag(number: usize) -> String {
format!("{MARKER_TAG_PREFIX}{number}{MARKER_TAG_SUFFIX}")
}
+pub fn marker_tag_relative(delta: isize) -> String {
+ if delta > 0 {
+ format!("<|marker+{delta}|>")
+ } else if delta == 0 {
+ String::from("<|marker-0|>")
+ } else {
+ format!("<|marker{delta}|>")
+ }
+}
+
+struct LineInfo {
+ start: usize,
+ is_blank: bool,
+ is_good_start: bool,
+}
+
+fn collect_line_info(text: &str) -> Vec<LineInfo> {
+ let mut lines = Vec::new();
+ let mut offset = 0;
+ for line in text.split('\n') {
+ let trimmed = line.trim();
+ let is_blank = trimmed.is_empty();
+ let is_good_start = !is_blank && !is_structural_tail(trimmed);
+ lines.push(LineInfo {
+ start: offset,
+ is_blank,
+ is_good_start,
+ });
+ offset += line.len() + 1;
+ }
+ // split('\n') on "abc\n" yields ["abc", ""] — drop the phantom trailing
+ // empty element when the text ends with '\n'.
+ if text.ends_with('\n') && lines.len() > 1 {
+ lines.pop();
+ }
+ lines
+}
+
+fn is_structural_tail(trimmed_line: &str) -> bool {
+ if trimmed_line.starts_with(&['}', ']', ')']) {
+ return true;
+ }
+ matches!(
+ trimmed_line.trim_end_matches(';'),
+ "break" | "continue" | "return" | "throw" | "end"
+ )
+}
+
+/// Starting from line `from`, scan up to `MAX_NUDGE_LINES` forward to find a
+/// line with `is_good_start`. Returns `None` if no suitable line is found.
+fn skip_to_good_start(lines: &[LineInfo], from: usize) -> Option<usize> {
+ (from..lines.len().min(from + MAX_NUDGE_LINES)).find(|&i| lines[i].is_good_start)
+}
+
/// Compute byte offsets within `editable_text` where marker boundaries should
/// be placed.
///
/// Returns a sorted `Vec<usize>` that always starts with `0` and ends with
/// `editable_text.len()`. Interior offsets are placed at line boundaries
/// (right after a `\n`), preferring blank-line boundaries when available and
-/// respecting `MIN_BLOCK_LINES` / `MAX_BLOCK_LINES` constraints.
-pub fn compute_marker_offsets(editable_text: &str) -> Vec<usize> {
+/// respecting `min_block_lines` / `max_block_lines` constraints.
+fn compute_marker_offsets_with_limits(
+ editable_text: &str,
+ min_block_lines: usize,
+ max_block_lines: usize,
+) -> Vec<usize> {
if editable_text.is_empty() {
return vec![0, 0];
}
+ let lines = collect_line_info(editable_text);
let mut offsets = vec![0usize];
- let mut lines_since_last_marker = 0usize;
- let mut byte_offset = 0usize;
-
- for line in editable_text.split('\n') {
- let line_end = byte_offset + line.len() + 1;
- let is_past_end = line_end > editable_text.len();
- let actual_line_end = line_end.min(editable_text.len());
- lines_since_last_marker += 1;
-
- let is_blank = line.trim().is_empty();
-
- if !is_past_end && lines_since_last_marker >= MIN_BLOCK_LINES {
- if is_blank {
- // Blank-line boundary found. We'll place the marker when we
- // find the next non-blank line (handled below).
- } else if lines_since_last_marker >= MAX_BLOCK_LINES {
- offsets.push(actual_line_end);
- lines_since_last_marker = 0;
- }
- }
+ let mut last_boundary_line = 0;
+ let mut i = 0;
+
+ while i < lines.len() {
+ let gap = i - last_boundary_line;
- // Non-blank line immediately following blank line(s): split here so
- // the new block starts with this line.
- if !is_blank && byte_offset > 0 && lines_since_last_marker >= MIN_BLOCK_LINES {
- let before = &editable_text[..byte_offset];
- let has_preceding_blank_line = before
- .strip_suffix('\n')
- .map(|stripped| {
- let last_line = match stripped.rfind('\n') {
- Some(pos) => &stripped[pos + 1..],
- None => stripped,
- };
- last_line.trim().is_empty()
- })
- .unwrap_or(false);
-
- if has_preceding_blank_line {
- offsets.push(byte_offset);
- lines_since_last_marker = 1;
+ // Blank-line split: non-blank line following blank line(s) with enough
+ // accumulated lines.
+ if gap >= min_block_lines && !lines[i].is_blank && i > 0 && lines[i - 1].is_blank {
+ let target = if lines[i].is_good_start {
+ i
+ } else {
+ skip_to_good_start(&lines, i).unwrap_or(i)
+ };
+ if lines.len() - target >= min_block_lines
+ && lines[target].start > *offsets.last().unwrap_or(&0)
+ {
+ offsets.push(lines[target].start);
+ last_boundary_line = target;
+ i = target + 1;
+ continue;
}
}
- byte_offset = actual_line_end;
-
- // Re-check after blank-line logic since lines_since_last_marker may
- // have been reset.
- if !is_past_end && lines_since_last_marker >= MAX_BLOCK_LINES {
- if *offsets.last().unwrap_or(&0) != actual_line_end {
- offsets.push(actual_line_end);
- lines_since_last_marker = 0;
+ // Hard cap: too many lines without a split.
+ if gap >= max_block_lines {
+ let target = skip_to_good_start(&lines, i).unwrap_or(i);
+ if lines[target].start > *offsets.last().unwrap_or(&0) {
+ offsets.push(lines[target].start);
+ last_boundary_line = target;
+ i = target + 1;
+ continue;
}
}
+
+ i += 1;
}
let end = editable_text.len();
@@ -84,6 +134,15 @@ pub fn compute_marker_offsets(editable_text: &str) -> Vec<usize> {
offsets
}
+/// Compute byte offsets within `editable_text` for the V0316/V0317 block sizing rules.
+pub fn compute_marker_offsets(editable_text: &str) -> Vec<usize> {
+ compute_marker_offsets_with_limits(editable_text, V0316_MIN_BLOCK_LINES, V0316_MAX_BLOCK_LINES)
+}
+
+pub fn compute_marker_offsets_v0318(editable_text: &str) -> Vec<usize> {
+ compute_marker_offsets_with_limits(editable_text, V0318_MIN_BLOCK_LINES, V0318_MAX_BLOCK_LINES)
+}
+
/// Write the editable region content with marker tags, inserting the cursor
/// marker at the given offset within the editable text.
pub fn write_editable_with_markers(
@@ -254,27 +313,8 @@ pub fn encode_from_old_and_new(
}
let marker_offsets = compute_marker_offsets(old_editable);
-
- let common_prefix = old_editable
- .bytes()
- .zip(new_editable.bytes())
- .take_while(|(a, b)| a == b)
- .count();
-
- let old_remaining = old_editable.len() - common_prefix;
- let new_remaining = new_editable.len() - common_prefix;
- let max_suffix = old_remaining.min(new_remaining);
- let common_suffix = old_editable.as_bytes()[old_editable.len() - max_suffix..]
- .iter()
- .rev()
- .zip(
- new_editable.as_bytes()[new_editable.len() - max_suffix..]
- .iter()
- .rev(),
- )
- .take_while(|(a, b)| a == b)
- .count();
-
+ let (common_prefix, common_suffix) =
+ common_prefix_suffix(old_editable.as_bytes(), new_editable.as_bytes());
let change_end_in_old = old_editable.len() - common_suffix;
let start_marker_idx = marker_offsets
@@ -367,6 +407,529 @@ pub fn extract_editable_region_from_markers(text: &str) -> Option<String> {
Some(result)
}
+struct ParsedTag {
+ value: isize,
+ tag_start: usize,
+ tag_end: usize,
+}
+
+fn collect_tags(text: &str, prefix: &str, parse: fn(&str) -> Option<isize>) -> Vec<ParsedTag> {
+ let mut tags = Vec::new();
+ let mut search_from = 0;
+ while let Some(rel_pos) = text[search_from..].find(prefix) {
+ let tag_start = search_from + rel_pos;
+ let payload_start = tag_start + prefix.len();
+ if let Some(suffix_rel) = text[payload_start..].find(MARKER_TAG_SUFFIX) {
+ let payload_end = payload_start + suffix_rel;
+ if let Some(value) = parse(&text[payload_start..payload_end]) {
+ let tag_end = payload_end + MARKER_TAG_SUFFIX.len();
+ tags.push(ParsedTag {
+ value,
+ tag_start,
+ tag_end,
+ });
+ search_from = tag_end;
+ continue;
+ }
+ }
+ search_from = tag_start + prefix.len();
+ }
+ tags
+}
+
+fn collect_marker_tags(text: &str) -> Vec<ParsedTag> {
+ collect_tags(text, MARKER_TAG_PREFIX, |s| {
+ s.parse::<usize>().ok().map(|n| n as isize)
+ })
+}
+
+fn collect_relative_marker_tags(text: &str) -> Vec<ParsedTag> {
+ collect_tags(text, RELATIVE_MARKER_TAG_PREFIX, |s| {
+ s.parse::<isize>().ok()
+ })
+}
+
+pub fn nearest_marker_number(cursor_offset: Option<usize>, marker_offsets: &[usize]) -> usize {
+ let cursor = cursor_offset.unwrap_or(0);
+ marker_offsets
+ .iter()
+ .enumerate()
+ .min_by_key(|(_, offset)| (**offset as isize - cursor as isize).unsigned_abs())
+ .map(|(idx, _)| idx + 1)
+ .unwrap_or(1)
+}
+
+fn cursor_block_index(cursor_offset: Option<usize>, marker_offsets: &[usize]) -> usize {
+ let cursor = cursor_offset.unwrap_or(0);
+ marker_offsets
+ .windows(2)
+ .position(|window| cursor >= window[0] && cursor < window[1])
+ .unwrap_or_else(|| marker_offsets.len().saturating_sub(2))
+}
+
+fn common_prefix_suffix(a: &[u8], b: &[u8]) -> (usize, usize) {
+ let prefix = a.iter().zip(b.iter()).take_while(|(x, y)| x == y).count();
+ let remaining_a = a.len() - prefix;
+ let remaining_b = b.len() - prefix;
+ let max_suffix = remaining_a.min(remaining_b);
+ let suffix = a[a.len() - max_suffix..]
+ .iter()
+ .rev()
+ .zip(b[b.len() - max_suffix..].iter().rev())
+ .take_while(|(x, y)| x == y)
+ .count();
+ (prefix, suffix)
+}
+
+/// Map a byte offset from old span coordinates to new span coordinates,
+/// using common prefix/suffix within the span for accuracy.
+fn map_boundary_offset(
+ old_rel: usize,
+ old_span_len: usize,
+ new_span_len: usize,
+ span_common_prefix: usize,
+ span_common_suffix: usize,
+) -> usize {
+ if old_rel <= span_common_prefix {
+ old_rel
+ } else if old_rel >= old_span_len - span_common_suffix {
+ new_span_len - (old_span_len - old_rel)
+ } else {
+ let old_changed_start = span_common_prefix;
+ let old_changed_len = old_span_len
+ .saturating_sub(span_common_prefix)
+ .saturating_sub(span_common_suffix);
+ let new_changed_start = span_common_prefix;
+ let new_changed_len = new_span_len
+ .saturating_sub(span_common_prefix)
+ .saturating_sub(span_common_suffix);
+
+ if old_changed_len == 0 {
+ new_changed_start
+ } else {
+ new_changed_start + ((old_rel - old_changed_start) * new_changed_len / old_changed_len)
+ }
+ }
+}
+
+fn snap_to_line_start(text: &str, offset: usize) -> usize {
+ let bounded = offset.min(text.len());
+ let bounded = text.floor_char_boundary(bounded);
+
+ if bounded >= text.len() {
+ return text.len();
+ }
+
+ if bounded == 0 || text.as_bytes().get(bounded - 1) == Some(&b'\n') {
+ return bounded;
+ }
+
+ if let Some(next_nl_rel) = text[bounded..].find('\n') {
+ let next = bounded + next_nl_rel + 1;
+ return text.floor_char_boundary(next.min(text.len()));
+ }
+
+ let prev_start = text[..bounded].rfind('\n').map(|idx| idx + 1).unwrap_or(0);
+ text.floor_char_boundary(prev_start)
+}
+
+/// Write the editable region content with byte-exact marker tags, inserting the
+/// cursor marker at the given offset within the editable text.
+///
+/// The `tag_for_index` closure maps a boundary index to the marker tag string.
+fn write_editable_with_markers_impl(
+ output: &mut String,
+ editable_text: &str,
+ cursor_offset_in_editable: usize,
+ cursor_marker: &str,
+ marker_offsets: &[usize],
+ tag_for_index: impl Fn(usize) -> String,
+) {
+ let mut cursor_placed = false;
+ for (i, &offset) in marker_offsets.iter().enumerate() {
+ output.push_str(&tag_for_index(i));
+
+ if let Some(&next_offset) = marker_offsets.get(i + 1) {
+ let block = &editable_text[offset..next_offset];
+ if !cursor_placed
+ && cursor_offset_in_editable >= offset
+ && cursor_offset_in_editable <= next_offset
+ {
+ cursor_placed = true;
+ let cursor_in_block = cursor_offset_in_editable - offset;
+ output.push_str(&block[..cursor_in_block]);
+ output.push_str(cursor_marker);
+ output.push_str(&block[cursor_in_block..]);
+ } else {
+ output.push_str(block);
+ }
+ }
+ }
+}
+
+pub fn write_editable_with_markers_v0316(
+ output: &mut String,
+ editable_text: &str,
+ cursor_offset_in_editable: usize,
+ cursor_marker: &str,
+) {
+ let marker_offsets = compute_marker_offsets(editable_text);
+ write_editable_with_markers_impl(
+ output,
+ editable_text,
+ cursor_offset_in_editable,
+ cursor_marker,
+ &marker_offsets,
+ |i| marker_tag(i + 1),
+ );
+}
+
+pub fn write_editable_with_markers_v0317(
+ output: &mut String,
+ editable_text: &str,
+ cursor_offset_in_editable: usize,
+ cursor_marker: &str,
+) {
+ let marker_offsets = compute_marker_offsets(editable_text);
+ let anchor_idx = cursor_block_index(Some(cursor_offset_in_editable), &marker_offsets);
+ write_editable_with_markers_impl(
+ output,
+ editable_text,
+ cursor_offset_in_editable,
+ cursor_marker,
+ &marker_offsets,
+ |i| marker_tag_relative(i as isize - anchor_idx as isize),
+ );
+}
+
+pub fn write_editable_with_markers_v0318(
+ output: &mut String,
+ editable_text: &str,
+ cursor_offset_in_editable: usize,
+ cursor_marker: &str,
+) {
+ let marker_offsets = compute_marker_offsets_v0318(editable_text);
+ write_editable_with_markers_impl(
+ output,
+ editable_text,
+ cursor_offset_in_editable,
+ cursor_marker,
+ &marker_offsets,
+ |i| marker_tag(i + 1),
+ );
+}
+
+/// Parse byte-exact model output and reconstruct the full new editable region.
+///
+/// `resolve_boundary` maps a parsed tag value to an absolute byte offset in
+/// old_editable, given the marker_offsets. Returns `(start_byte, end_byte)` or
+/// an error.
+fn apply_marker_span_impl(
+ old_editable: &str,
+ tags: &[ParsedTag],
+ output: &str,
+ resolve_boundaries: impl Fn(isize, isize) -> Result<(usize, usize)>,
+) -> Result<String> {
+ if tags.is_empty() {
+ return Err(anyhow!("no marker tags found in output"));
+ }
+ if tags.len() == 1 {
+ return Err(anyhow!(
+ "only one marker tag found in output, expected at least two"
+ ));
+ }
+
+ let start_value = tags[0].value;
+ let end_value = tags[tags.len() - 1].value;
+
+ if start_value == end_value {
+ return Ok(old_editable.to_string());
+ }
+
+ let (start_byte, end_byte) = resolve_boundaries(start_value, end_value)?;
+
+ if start_byte > end_byte {
+ return Err(anyhow!("start marker must come before end marker"));
+ }
+
+ let mut new_content = String::new();
+ for i in 0..tags.len() - 1 {
+ let content_start = tags[i].tag_end;
+ let content_end = tags[i + 1].tag_start;
+ if content_start <= content_end {
+ new_content.push_str(&output[content_start..content_end]);
+ }
+ }
+
+ let mut result = String::new();
+ result.push_str(&old_editable[..start_byte]);
+ result.push_str(&new_content);
+ result.push_str(&old_editable[end_byte..]);
+
+ Ok(result)
+}
+
+pub fn apply_marker_span_v0316(old_editable: &str, output: &str) -> Result<String> {
+ let tags = collect_marker_tags(output);
+
+ // Validate monotonically increasing with no gaps (best-effort warning)
+ if tags.len() >= 2 {
+ let start_num = tags[0].value;
+ let end_num = tags[tags.len() - 1].value;
+ if start_num != end_num {
+ let expected: Vec<isize> = (start_num..=end_num).collect();
+ let actual: Vec<isize> = tags.iter().map(|t| t.value).collect();
+ if actual != expected {
+ eprintln!(
+ "V0316 marker sequence validation failed: expected {:?}, got {:?}. Attempting best-effort parse.",
+ expected, actual
+ );
+ }
+ }
+ }
+
+ let marker_offsets = compute_marker_offsets(old_editable);
+ apply_marker_span_impl(old_editable, &tags, output, |start_val, end_val| {
+ let start_idx = (start_val as usize)
+ .checked_sub(1)
+ .context("marker numbers are 1-indexed")?;
+ let end_idx = (end_val as usize)
+ .checked_sub(1)
+ .context("marker numbers are 1-indexed")?;
+ let start_byte = *marker_offsets
+ .get(start_idx)
+ .context("start marker number out of range")?;
+ let end_byte = *marker_offsets
+ .get(end_idx)
+ .context("end marker number out of range")?;
+ Ok((start_byte, end_byte))
+ })
+}
+
+pub fn apply_marker_span_v0317(
+ old_editable: &str,
+ output: &str,
+ cursor_offset_in_old: Option<usize>,
+) -> Result<String> {
+ let tags = collect_relative_marker_tags(output);
+ let marker_offsets = compute_marker_offsets(old_editable);
+ let anchor_idx = cursor_block_index(cursor_offset_in_old, &marker_offsets);
+
+ apply_marker_span_impl(old_editable, &tags, output, |start_delta, end_delta| {
+ let start_idx_signed = anchor_idx as isize + start_delta;
+ let end_idx_signed = anchor_idx as isize + end_delta;
+ if start_idx_signed < 0 || end_idx_signed < 0 {
+ return Err(anyhow!("relative marker maps before first marker"));
+ }
+ let start_idx = usize::try_from(start_idx_signed).context("invalid start marker index")?;
+ let end_idx = usize::try_from(end_idx_signed).context("invalid end marker index")?;
+ let start_byte = *marker_offsets
+ .get(start_idx)
+ .context("start marker number out of range")?;
+ let end_byte = *marker_offsets
+ .get(end_idx)
+ .context("end marker number out of range")?;
+ Ok((start_byte, end_byte))
+ })
+}
+
+pub fn apply_marker_span_v0318(old_editable: &str, output: &str) -> Result<String> {
+ let tags = collect_marker_tags(output);
+
+ if tags.len() >= 2 {
+ let start_num = tags[0].value;
+ let end_num = tags[tags.len() - 1].value;
+ if start_num != end_num {
+ let expected: Vec<isize> = (start_num..=end_num).collect();
+ let actual: Vec<isize> = tags.iter().map(|t| t.value).collect();
+ if actual != expected {
+ eprintln!(
+ "V0318 marker sequence validation failed: expected {:?}, got {:?}. Attempting best-effort parse.",
+ expected, actual
+ );
+ }
+ }
+ }
+
+ let marker_offsets = compute_marker_offsets_v0318(old_editable);
+ apply_marker_span_impl(old_editable, &tags, output, |start_val, end_val| {
+ let start_idx = (start_val as usize)
+ .checked_sub(1)
+ .context("marker numbers are 1-indexed")?;
+ let end_idx = (end_val as usize)
+ .checked_sub(1)
+ .context("marker numbers are 1-indexed")?;
+ let start_byte = *marker_offsets
+ .get(start_idx)
+ .context("start marker number out of range")?;
+ let end_byte = *marker_offsets
+ .get(end_idx)
+ .context("end marker number out of range")?;
+ Ok((start_byte, end_byte))
+ })
+}
+
+/// Encode the training target from old and new editable text.
+///
+/// Shared implementation for V0316, V0317, and V0318. The `tag_for_block_idx`
+/// closure maps a block index to the appropriate marker tag string.
+/// `no_edit_tag` is the marker tag to repeat when there are no edits.
+fn encode_from_old_and_new_impl(
+ old_editable: &str,
+ new_editable: &str,
+ cursor_offset_in_new: Option<usize>,
+ cursor_marker: &str,
+ end_marker: &str,
+ no_edit_tag: &str,
+ marker_offsets: &[usize],
+ tag_for_block_idx: impl Fn(usize) -> String,
+) -> Result<String> {
+ if old_editable == new_editable {
+ return Ok(format!("{no_edit_tag}{no_edit_tag}{end_marker}"));
+ }
+
+ let (common_prefix, common_suffix) =
+ common_prefix_suffix(old_editable.as_bytes(), new_editable.as_bytes());
+ let change_end_in_old = old_editable.len() - common_suffix;
+
+ let start_marker_idx = marker_offsets
+ .iter()
+ .rposition(|&offset| offset <= common_prefix)
+ .unwrap_or(0);
+ let end_marker_idx = marker_offsets
+ .iter()
+ .position(|&offset| offset >= change_end_in_old)
+ .unwrap_or(marker_offsets.len() - 1);
+
+ let old_start = marker_offsets[start_marker_idx];
+ let old_end = marker_offsets[end_marker_idx];
+
+ let new_start = old_start;
+ let new_end = new_editable
+ .len()
+ .saturating_sub(old_editable.len().saturating_sub(old_end));
+
+ let new_span = &new_editable[new_start..new_end];
+ let old_span = &old_editable[old_start..old_end];
+
+ let (span_common_prefix, span_common_suffix) =
+ common_prefix_suffix(old_span.as_bytes(), new_span.as_bytes());
+
+ let mut result = String::new();
+ let mut prev_new_rel = 0usize;
+ let mut cursor_placed = false;
+
+ for block_idx in start_marker_idx..end_marker_idx {
+ result.push_str(&tag_for_block_idx(block_idx));
+
+ let new_rel_end = if block_idx + 1 == end_marker_idx {
+ new_span.len()
+ } else {
+ let old_rel = marker_offsets[block_idx + 1] - old_start;
+ let mapped = map_boundary_offset(
+ old_rel,
+ old_span.len(),
+ new_span.len(),
+ span_common_prefix,
+ span_common_suffix,
+ );
+ snap_to_line_start(new_span, mapped)
+ };
+
+ let new_rel_end = new_rel_end.max(prev_new_rel);
+ let block_content = &new_span[prev_new_rel..new_rel_end];
+
+ if !cursor_placed {
+ if let Some(cursor_offset) = cursor_offset_in_new {
+ let abs_start = new_start + prev_new_rel;
+ let abs_end = new_start + new_rel_end;
+ if cursor_offset >= abs_start && cursor_offset <= abs_end {
+ cursor_placed = true;
+ let cursor_in_block = cursor_offset - abs_start;
+ let bounded = cursor_in_block.min(block_content.len());
+ result.push_str(&block_content[..bounded]);
+ result.push_str(cursor_marker);
+ result.push_str(&block_content[bounded..]);
+ prev_new_rel = new_rel_end;
+ continue;
+ }
+ }
+ }
+
+ result.push_str(block_content);
+ prev_new_rel = new_rel_end;
+ }
+
+ result.push_str(&tag_for_block_idx(end_marker_idx));
+ result.push_str(end_marker);
+
+ Ok(result)
+}
+
+pub fn encode_from_old_and_new_v0316(
+ old_editable: &str,
+ new_editable: &str,
+ cursor_offset_in_new: Option<usize>,
+ cursor_marker: &str,
+ end_marker: &str,
+) -> Result<String> {
+ let marker_offsets = compute_marker_offsets(old_editable);
+ let no_edit_tag = marker_tag(nearest_marker_number(cursor_offset_in_new, &marker_offsets));
+ encode_from_old_and_new_impl(
+ old_editable,
+ new_editable,
+ cursor_offset_in_new,
+ cursor_marker,
+ end_marker,
+ &no_edit_tag,
+ &marker_offsets,
+ |block_idx| marker_tag(block_idx + 1),
+ )
+}
+
+pub fn encode_from_old_and_new_v0317(
+ old_editable: &str,
+ new_editable: &str,
+ cursor_offset_in_new: Option<usize>,
+ cursor_marker: &str,
+ end_marker: &str,
+) -> Result<String> {
+ let marker_offsets = compute_marker_offsets(old_editable);
+ let anchor_idx = cursor_block_index(cursor_offset_in_new, &marker_offsets);
+ let no_edit_tag = marker_tag_relative(0);
+ encode_from_old_and_new_impl(
+ old_editable,
+ new_editable,
+ cursor_offset_in_new,
+ cursor_marker,
+ end_marker,
+ &no_edit_tag,
+ &marker_offsets,
+ |block_idx| marker_tag_relative(block_idx as isize - anchor_idx as isize),
+ )
+}
+
+pub fn encode_from_old_and_new_v0318(
+ old_editable: &str,
+ new_editable: &str,
+ cursor_offset_in_new: Option<usize>,
+ cursor_marker: &str,
+ end_marker: &str,
+) -> Result<String> {
+ let marker_offsets = compute_marker_offsets_v0318(old_editable);
+ let no_edit_tag = marker_tag(nearest_marker_number(cursor_offset_in_new, &marker_offsets));
+ encode_from_old_and_new_impl(
+ old_editable,
+ new_editable,
+ cursor_offset_in_new,
+ cursor_marker,
+ end_marker,
+ &no_edit_tag,
+ &marker_offsets,
+ |block_idx| marker_tag(block_idx + 1),
+ )
+}
+
#[cfg(test)]
mod tests {
use super::*;
@@ -387,6 +950,88 @@ mod tests {
assert_eq!(*offsets.last().unwrap(), text.len());
}
+ #[test]
+ fn test_compute_marker_offsets_blank_line_split_overrides_pending_hard_cap_boundary() {
+ let text = "\
+class OCRDataframe(BaseModel):
+ model_config = ConfigDict(arbitrary_types_allowed=True)
+
+ df: pl.DataFrame
+
+ def page(self, page_number: int = 0) -> \"OCRDataframe\":
+ # Filter dataframe on specific page
+ df_page = self.df.filter(pl.col(\"page\") == page_number)
+ return OCRDataframe(df=df_page)
+
+ def get_text_cell(
+ self,
+ cell: Cell,
+ margin: int = 0,
+ page_number: Optional[int] = None,
+ min_confidence: int = 50,
+ ) -> Optional[str]:
+ \"\"\"
+ Get text corresponding to cell
+";
+ let offsets = compute_marker_offsets(text);
+
+ let def_start = text
+ .find(" def get_text_cell(")
+ .expect("def line exists");
+ let self_start = text.find(" self,").expect("self line exists");
+
+ assert!(
+ offsets.contains(&def_start),
+ "expected boundary at def line start ({def_start}), got {offsets:?}"
+ );
+ assert!(
+ !offsets.contains(&self_start),
+ "did not expect boundary at self line start ({self_start}), got {offsets:?}"
+ );
+ }
+
+ #[test]
+ fn test_compute_marker_offsets_blank_line_split_skips_closer_line() {
+ let text = "\
+impl Plugin for AhoySchedulePlugin {
+ fn build(&self, app: &mut App) {
+ app.configure_sets(
+ self.schedule,
+ (
+ AhoySystems::MoveCharacters,
+ AhoySystems::ApplyForcesToDynamicRigidBodies,
+ )
+ .chain()
+ .before(PhysicsSystems::First),
+ );
+
+ }
+}
+
+/// System set used by all systems of `bevy_ahoy`.
+#[derive(SystemSet, Debug, Clone, Copy, Hash, PartialEq, Eq)]
+pub enum AhoySystems {
+ MoveCharacters,
+ ApplyForcesToDynamicRigidBodies,
+}
+";
+ let offsets = compute_marker_offsets(text);
+
+ let closer_start = text.find(" }\n").expect("closer line exists");
+ let doc_start = text
+ .find("/// System set used by all systems of `bevy_ahoy`.")
+ .expect("doc line exists");
+
+ assert!(
+ !offsets.contains(&closer_start),
+ "did not expect boundary at closer line start ({closer_start}), got {offsets:?}"
+ );
+ assert!(
+ offsets.contains(&doc_start),
+ "expected boundary at doc line start ({doc_start}), got {offsets:?}"
+ );
+ }
+
#[test]
fn test_compute_marker_offsets_max_lines_split() {
let text = "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n";
@@ -394,12 +1039,152 @@ mod tests {
assert!(offsets.len() >= 3, "offsets: {:?}", offsets);
}
+ #[test]
+ fn test_compute_marker_offsets_hard_cap_nudges_past_closer_to_case_line() {
+ let text = "a1\na2\na3\na4\na5\na6\na7\na8\n}\ncase 'x': {\nbody\n";
+ let offsets = compute_marker_offsets(text);
+
+ let expected = text.find("case 'x': {").expect("case line exists");
+ assert!(
+ offsets.contains(&expected),
+ "expected nudged boundary at case line start ({expected}), got {offsets:?}"
+ );
+ }
+
+ #[test]
+ fn test_compute_marker_offsets_hard_cap_nudge_respects_max_forward_lines() {
+ let text = "a1\na2\na3\na4\na5\na6\na7\na8\n}\n}\n}\n}\n}\ncase 'x': {\nbody\n";
+ let offsets = compute_marker_offsets(text);
+
+ let case_start = text.find("case 'x': {").expect("case line exists");
+ assert!(
+ !offsets.contains(&case_start),
+ "boundary should not nudge beyond max forward lines; offsets: {offsets:?}"
+ );
+ }
+
+ #[test]
+ fn test_compute_marker_offsets_stay_sorted_when_hard_cap_boundary_nudges_forward() {
+ let text = "\
+aaaaaaaaaa = 1;
+bbbbbbbbbb = 2;
+cccccccccc = 3;
+dddddddddd = 4;
+eeeeeeeeee = 5;
+ffffffffff = 6;
+gggggggggg = 7;
+hhhhhhhhhh = 8;
+ };
+ };
+
+ grafanaDashboards = {
+ cluster-overview.spec = {
+ inherit instanceSelector;
+ folderRef = \"infrastructure\";
+ json = builtins.readFile ./grafana/dashboards/cluster-overview.json;
+ };
+ };
+";
+ let offsets = compute_marker_offsets(text);
+
+ assert_eq!(offsets.first().copied(), Some(0), "offsets: {offsets:?}");
+ assert_eq!(
+ offsets.last().copied(),
+ Some(text.len()),
+ "offsets: {offsets:?}"
+ );
+ assert!(
+ offsets.windows(2).all(|window| window[0] <= window[1]),
+ "offsets must be sorted: {offsets:?}"
+ );
+ }
+
#[test]
fn test_compute_marker_offsets_empty() {
let offsets = compute_marker_offsets("");
assert_eq!(offsets, vec![0, 0]);
}
+ #[test]
+ fn test_compute_marker_offsets_avoid_short_markdown_blocks() {
+ let text = "\
+# Spree Posts
+
+This is a Posts extension for [Spree Commerce](https://spreecommerce.org), built with Ruby on Rails.
+
+## Installation
+
+1. Add this extension to your Gemfile with this line:
+
+ ```ruby
+ bundle add spree_posts
+ ```
+
+2. Run the install generator
+
+ ```ruby
+ bundle exec rails g spree_posts:install
+ ```
+
+3. Restart your server
+
+ If your server was running, restart it so that it can find the assets properly.
+
+## Developing
+
+1. Create a dummy app
+
+ ```bash
+ bundle update
+ bundle exec rake test_app
+ ```
+
+2. Add your new code
+3. Run tests
+
+ ```bash
+ bundle exec rspec
+ ```
+
+When testing your applications integration with this extension you may use it's factories.
+Simply add this require statement to your spec_helper:
+
+```ruby
+require 'spree_posts/factories'
+```
+
+## Releasing a new version
+
+```shell
+bundle exec gem bump -p -t
+bundle exec gem release
+```
+
+For more options please see [gem-release README](https://github.com/svenfuchs/gem-release)
+
+## Contributing
+
+If you'd like to contribute, please take a look at the contributing guide.
+";
+ let offsets = compute_marker_offsets(text);
+
+ assert_eq!(offsets.first().copied(), Some(0), "offsets: {offsets:?}");
+ assert_eq!(
+ offsets.last().copied(),
+ Some(text.len()),
+ "offsets: {offsets:?}"
+ );
+
+ for window in offsets.windows(2) {
+ let block = &text[window[0]..window[1]];
+ let line_count = block.lines().count();
+ assert!(
+ line_count >= V0316_MIN_BLOCK_LINES,
+ "block too short: {line_count} lines in block {block:?} with offsets {offsets:?}"
+ );
+ }
+ }
+
#[test]
fn test_extract_marker_span() {
let text = "<|marker_2|>\n new content\n<|marker_3|>\n";
@@ -554,4 +1339,315 @@ mod tests {
"line1\nline2"
);
}
+
+ #[test]
+ fn test_write_editable_with_markers_v0316_byte_exact() {
+ let editable = "aaa\nbbb\nccc\n";
+ let mut output = String::new();
+ write_editable_with_markers_v0316(&mut output, editable, 4, "<|user_cursor|>");
+ assert!(output.starts_with("<|marker_1|>"));
+ assert!(output.contains("<|user_cursor|>"));
+ let stripped = output.replace("<|user_cursor|>", "");
+ let stripped = strip_marker_tags(&stripped);
+ assert_eq!(stripped, editable);
+ }
+
+ #[test]
+ fn test_apply_marker_span_v0316_basic() {
+ let old = "aaa\nbbb\nccc\n";
+ let output = "<|marker_1|>aaa\nBBB\nccc\n<|marker_2|>";
+ let result = apply_marker_span_v0316(old, output).unwrap();
+ assert_eq!(result, "aaa\nBBB\nccc\n");
+ }
+
+ #[test]
+ fn test_apply_marker_span_v0316_no_edit() {
+ let old = "aaa\nbbb\nccc\n";
+ let output = "<|marker_1|><|marker_1|>";
+ let result = apply_marker_span_v0316(old, output).unwrap();
+ assert_eq!(result, old);
+ }
+
+ #[test]
+ fn test_apply_marker_span_v0316_no_edit_any_marker() {
+ let old = "aaa\nbbb\nccc\n";
+ let output = "<|marker_2|>ignored content<|marker_2|>";
+ let result = apply_marker_span_v0316(old, output).unwrap();
+ assert_eq!(result, old);
+ }
+
+ #[test]
+ fn test_apply_marker_span_v0316_multi_block() {
+ let old = "line1\nline2\nline3\n\nline5\nline6\nline7\nline8\n";
+ let marker_offsets = compute_marker_offsets(old);
+ assert!(
+ marker_offsets.len() >= 3,
+ "expected at least 3 offsets, got {:?}",
+ marker_offsets
+ );
+
+ let new_content = "LINE1\nLINE2\nLINE3\n\nLINE5\nLINE6\nLINE7\nLINE8\n";
+ let mut output = String::new();
+ output.push_str("<|marker_1|>");
+ for i in 0..marker_offsets.len() - 1 {
+ if i > 0 {
+ output.push_str(&marker_tag(i + 1));
+ }
+ let start = marker_offsets[i];
+ let end = marker_offsets[i + 1];
+ let block_len = end - start;
+ output.push_str(&new_content[start..start + block_len]);
+ }
+ let last_marker_num = marker_offsets.len();
+ output.push_str(&marker_tag(last_marker_num));
+ let result = apply_marker_span_v0316(old, &output).unwrap();
+ assert_eq!(result, new_content);
+ }
+
+ #[test]
+ fn test_apply_marker_span_v0316_byte_exact_no_normalization() {
+ let old = "aaa\nbbb\nccc\n";
+ let output = "<|marker_1|>aaa\nBBB\nccc<|marker_2|>";
+ let result = apply_marker_span_v0316(old, output).unwrap();
+ assert_eq!(result, "aaa\nBBB\nccc");
+ }
+
+ #[test]
+ fn test_encode_v0316_no_edits() {
+ let old = "aaa\nbbb\nccc\n";
+ let result =
+ encode_from_old_and_new_v0316(old, old, Some(5), "<|user_cursor|>", "<|end|>").unwrap();
+ assert!(result.ends_with("<|end|>"));
+ let stripped = result.strip_suffix("<|end|>").unwrap();
+ let result_parsed = apply_marker_span_v0316(old, stripped).unwrap();
+ assert_eq!(result_parsed, old);
+ }
+
+ #[test]
+ fn test_encode_v0316_with_change() {
+ let old = "aaa\nbbb\nccc\n";
+ let new = "aaa\nBBB\nccc\n";
+ let result =
+ encode_from_old_and_new_v0316(old, new, None, "<|user_cursor|>", "<|end|>").unwrap();
+ assert!(result.contains("<|marker_1|>"));
+ assert!(result.contains("<|marker_2|>"));
+ assert!(result.ends_with("<|end|>"));
+ }
+
+ #[test]
+ fn test_roundtrip_v0316() {
+ let old = "line1\nline2\nline3\n\nline5\nline6\nline7\nline8\nline9\nline10\n";
+ let new = "line1\nline2\nline3\n\nline5\nLINE6\nline7\nline8\nline9\nline10\n";
+ let encoded =
+ encode_from_old_and_new_v0316(old, new, None, "<|user_cursor|>", "<|end|>").unwrap();
+ let stripped = encoded
+ .strip_suffix("<|end|>")
+ .expect("should have end marker");
+ let reconstructed = apply_marker_span_v0316(old, stripped).unwrap();
+ assert_eq!(reconstructed, new);
+ }
+
+ #[test]
+ fn test_roundtrip_v0316_with_cursor() {
+ let old = "aaa\nbbb\nccc\n";
+ let new = "aaa\nBBB\nccc\n";
+ let result =
+ encode_from_old_and_new_v0316(old, new, Some(5), "<|user_cursor|>", "<|end|>").unwrap();
+ assert!(result.contains("<|user_cursor|>"), "result: {result}");
+ assert!(result.contains("B<|user_cursor|>BB"), "result: {result}");
+ }
+
+ #[test]
+ fn test_roundtrip_v0316_multi_block_change() {
+ let old = "line1\nline2\nline3\n\nline5\nline6\nline7\nline8\n";
+ let new = "line1\nLINE2\nline3\n\nline5\nLINE6\nline7\nline8\n";
+ let encoded =
+ encode_from_old_and_new_v0316(old, new, None, "<|user_cursor|>", "<|end|>").unwrap();
+ let stripped = encoded
+ .strip_suffix("<|end|>")
+ .expect("should have end marker");
+ let reconstructed = apply_marker_span_v0316(old, stripped).unwrap();
+ assert_eq!(reconstructed, new);
+ }
+
+ #[test]
+ fn test_nearest_marker_number() {
+ let offsets = vec![0, 10, 20, 30];
+ assert_eq!(nearest_marker_number(Some(0), &offsets), 1);
+ assert_eq!(nearest_marker_number(Some(9), &offsets), 2);
+ assert_eq!(nearest_marker_number(Some(15), &offsets), 2);
+ assert_eq!(nearest_marker_number(Some(25), &offsets), 3);
+ assert_eq!(nearest_marker_number(Some(30), &offsets), 4);
+ assert_eq!(nearest_marker_number(None, &offsets), 1);
+ }
+
+ #[test]
+ fn test_marker_tag_relative_formats_as_expected() {
+ assert_eq!(marker_tag_relative(-2), "<|marker-2|>");
+ assert_eq!(marker_tag_relative(-1), "<|marker-1|>");
+ assert_eq!(marker_tag_relative(0), "<|marker-0|>");
+ assert_eq!(marker_tag_relative(1), "<|marker+1|>");
+ assert_eq!(marker_tag_relative(2), "<|marker+2|>");
+ }
+
+ #[test]
+ fn test_write_editable_with_markers_v0317_includes_relative_markers_and_cursor() {
+ let editable = "aaa\nbbb\nccc\n";
+ let mut output = String::new();
+ write_editable_with_markers_v0317(&mut output, editable, 4, "<|user_cursor|>");
+
+ assert!(output.contains("<|marker-0|>"));
+ assert!(output.contains("<|user_cursor|>"));
+
+ let stripped = output.replace("<|user_cursor|>", "");
+ let stripped =
+ collect_relative_marker_tags(&stripped)
+ .iter()
+ .fold(stripped.clone(), |acc, marker| {
+ let tag = &stripped[marker.tag_start..marker.tag_end];
+ acc.replace(tag, "")
+ });
+ assert_eq!(stripped, editable);
+ }
+
+ #[test]
+ fn test_apply_marker_span_v0317_basic() {
+ let old = "aaa\nbbb\nccc\n";
+ let output = "<|marker-0|>aaa\nBBB\nccc\n<|marker+1|>";
+ let result = apply_marker_span_v0317(old, output, Some(0)).unwrap();
+ assert_eq!(result, "aaa\nBBB\nccc\n");
+ }
+
+ #[test]
+ fn test_apply_marker_span_v0317_no_edit() {
+ let old = "aaa\nbbb\nccc\n";
+ let output = "<|marker-0|><|marker-0|>";
+ let result = apply_marker_span_v0317(old, output, Some(0)).unwrap();
+ assert_eq!(result, old);
+ }
+
+ #[test]
+ fn test_encode_v0317_no_edits() {
+ let old = "aaa\nbbb\nccc\n";
+ let result =
+ encode_from_old_and_new_v0317(old, old, Some(5), "<|user_cursor|>", "<|end|>").unwrap();
+ assert_eq!(result, "<|marker-0|><|marker-0|><|end|>");
+ }
+
+ #[test]
+ fn test_roundtrip_v0317() {
+ let old = "line1\nline2\nline3\n\nline5\nline6\nline7\nline8\n";
+ let new = "line1\nLINE2\nline3\n\nline5\nLINE6\nline7\nline8\n";
+ let cursor = Some(6);
+
+ let encoded =
+ encode_from_old_and_new_v0317(old, new, cursor, "<|user_cursor|>", "<|end|>").unwrap();
+ let stripped = encoded
+ .strip_suffix("<|end|>")
+ .expect("should have end marker");
+ let stripped = stripped.replace("<|user_cursor|>", "");
+ let reconstructed = apply_marker_span_v0317(old, &stripped, cursor).unwrap();
+ assert_eq!(reconstructed, new);
+ }
+
+ #[test]
+ fn test_roundtrip_v0317_with_cursor_marker() {
+ let old = "aaa\nbbb\nccc\n";
+ let new = "aaa\nBBB\nccc\n";
+ let result =
+ encode_from_old_and_new_v0317(old, new, Some(5), "<|user_cursor|>", "<|end|>").unwrap();
+ assert!(result.contains("<|user_cursor|>"), "result: {result}");
+ assert!(result.contains("<|marker-0|>"), "result: {result}");
+ }
+
+ #[test]
+ fn test_compute_marker_offsets_v0318_uses_larger_block_sizes() {
+ let text = "l1\nl2\nl3\n\nl5\nl6\nl7\nl8\nl9\nl10\nl11\nl12\nl13\n";
+ let v0316_offsets = compute_marker_offsets(text);
+ let v0318_offsets = compute_marker_offsets_v0318(text);
+
+ assert!(v0318_offsets.len() < v0316_offsets.len());
+ assert_eq!(v0316_offsets.first().copied(), Some(0));
+ assert_eq!(v0318_offsets.first().copied(), Some(0));
+ assert_eq!(v0316_offsets.last().copied(), Some(text.len()));
+ assert_eq!(v0318_offsets.last().copied(), Some(text.len()));
+ }
+
+ #[test]
+ fn test_roundtrip_v0318() {
+ let old = "line1\nline2\nline3\n\nline5\nline6\nline7\nline8\nline9\nline10\n";
+ let new = "line1\nline2\nline3\n\nline5\nLINE6\nline7\nline8\nline9\nline10\n";
+ let encoded =
+ encode_from_old_and_new_v0318(old, new, None, "<|user_cursor|>", "<|end|>").unwrap();
+ let stripped = encoded
+ .strip_suffix("<|end|>")
+ .expect("should have end marker");
+ let reconstructed = apply_marker_span_v0318(old, stripped).unwrap();
+ assert_eq!(reconstructed, new);
+ }
+
+ #[test]
+ fn test_encode_v0317_markers_stay_on_line_boundaries() {
+ let old = "\
+\t\t\t\tcontinue outer;
+\t\t\t}
+\t\t}
+\t}
+
+\tconst intersectionObserver = new IntersectionObserver((entries) => {
+\t\tfor (const entry of entries) {
+\t\t\tif (entry.isIntersecting) {
+\t\t\t\tintersectionObserver.unobserve(entry.target);
+\t\t\t\tanchorPreload(/** @type {HTMLAnchorElement} */ (entry.target));
+\t\t\t}
+\t\t}
+\t});
+
+\tconst observer = new MutationObserver(() => {
+\t\tconst links = /** @type {NodeListOf<HTMLAnchorElement>} */ (
+\t\t\tdocument.querySelectorAll('a[data-preload]')
+\t\t);
+
+\t\tfor (const link of links) {
+\t\t\tif (linkSet.has(link)) continue;
+\t\t\tlinkSet.add(link);
+
+\t\t\tswitch (link.dataset.preload) {
+\t\t\t\tcase '':
+\t\t\t\tcase 'true':
+\t\t\t\tcase 'hover': {
+\t\t\t\t\tlink.addEventListener('mouseenter', function callback() {
+\t\t\t\t\t\tlink.removeEventListener('mouseenter', callback);
+\t\t\t\t\t\tanchorPreload(link);
+\t\t\t\t\t});
+";
+ let new = old.replacen(
+ "\t\t\t\tcase 'true':\n",
+ "\t\t\t\tcase 'TRUE':<|user_cursor|>\n",
+ 1,
+ );
+
+ let cursor_offset = new.find("<|user_cursor|>").expect("cursor marker in new");
+ let new_without_cursor = new.replace("<|user_cursor|>", "");
+
+ let encoded = encode_from_old_and_new_v0317(
+ old,
+ &new_without_cursor,
+ Some(cursor_offset),
+ "<|user_cursor|>",
+ "<|end|>",
+ )
+ .unwrap();
+
+ let core = encoded.strip_suffix("<|end|>").unwrap_or(&encoded);
+ for marker in collect_relative_marker_tags(core) {
+ let tag_start = marker.tag_start;
+ assert!(
+ tag_start == 0 || core.as_bytes()[tag_start - 1] == b'\n',
+ "marker not at line boundary: {} in output:\n{}",
+ marker_tag_relative(marker.value),
+ core
+ );
+ }
+ }
}
@@ -25,6 +25,11 @@ fn estimate_tokens(bytes: usize) -> usize {
bytes / 3
}
+/// Reserve ~10% of the requested token budget as slack so the assembled prompt does not overflow the limit.
+fn apply_prompt_budget_margin(max_tokens: usize) -> usize {
+ (max_tokens as f64 * 0.9).floor() as usize
+}
+
#[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)]
pub struct ZetaPromptInput {
pub cursor_path: Arc<Path>,
@@ -82,7 +87,14 @@ pub enum ZetaFormat {
v0226Hashline,
V0304VariableEdit,
V0304SeedNoEdits,
+ /// Multi-block marker spans with NO_EDITS sentinel.
V0306SeedMultiRegions,
+ /// Byte-exact marker spans; all intermediate markers emitted; repeated marker means no-edit.
+ V0316SeedMultiRegions,
+ /// V0316 with larger block sizes.
+ V0318SeedMultiRegions,
+ /// V0316, but marker numbers are relative to the cursor block (e.g. -1, -0, +1).
+ V0317SeedMultiRegions,
}
impl std::fmt::Display for ZetaFormat {
@@ -220,6 +232,42 @@ pub fn special_tokens_for_format(format: ZetaFormat) -> &'static [&'static str]
ZetaFormat::v0226Hashline => hashline::special_tokens(),
ZetaFormat::V0304VariableEdit => v0304_variable_edit::special_tokens(),
ZetaFormat::V0304SeedNoEdits => seed_coder::special_tokens(),
+ ZetaFormat::V0316SeedMultiRegions => {
+ static TOKENS: &[&str] = &[
+ seed_coder::FIM_SUFFIX,
+ seed_coder::FIM_PREFIX,
+ seed_coder::FIM_MIDDLE,
+ seed_coder::FILE_MARKER,
+ multi_region::V0316_END_MARKER,
+ CURSOR_MARKER,
+ multi_region::MARKER_TAG_PREFIX,
+ ];
+ TOKENS
+ }
+ ZetaFormat::V0318SeedMultiRegions => {
+ static TOKENS: &[&str] = &[
+ seed_coder::FIM_SUFFIX,
+ seed_coder::FIM_PREFIX,
+ seed_coder::FIM_MIDDLE,
+ seed_coder::FILE_MARKER,
+ multi_region::V0318_END_MARKER,
+ CURSOR_MARKER,
+ multi_region::MARKER_TAG_PREFIX,
+ ];
+ TOKENS
+ }
+ ZetaFormat::V0317SeedMultiRegions => {
+ static TOKENS: &[&str] = &[
+ seed_coder::FIM_SUFFIX,
+ seed_coder::FIM_PREFIX,
+ seed_coder::FIM_MIDDLE,
+ seed_coder::FILE_MARKER,
+ multi_region::V0317_END_MARKER,
+ CURSOR_MARKER,
+ multi_region::RELATIVE_MARKER_TAG_PREFIX,
+ ];
+ TOKENS
+ }
ZetaFormat::V0306SeedMultiRegions => {
static TOKENS: &[&str] = &[
seed_coder::FIM_SUFFIX,
@@ -248,6 +296,9 @@ pub fn token_limits_for_format(format: ZetaFormat) -> (usize, usize) {
| ZetaFormat::V0211SeedCoder
| ZetaFormat::v0226Hashline
| ZetaFormat::V0306SeedMultiRegions
+ | ZetaFormat::V0316SeedMultiRegions
+ | ZetaFormat::V0318SeedMultiRegions
+ | ZetaFormat::V0317SeedMultiRegions
| ZetaFormat::V0304SeedNoEdits => (350, 150),
ZetaFormat::V0304VariableEdit => (1024, 0),
}
@@ -266,6 +317,9 @@ pub fn stop_tokens_for_format(format: ZetaFormat) -> &'static [&'static str] {
| ZetaFormat::V0304VariableEdit
| ZetaFormat::V0306SeedMultiRegions
| ZetaFormat::V0304SeedNoEdits => &[],
+ ZetaFormat::V0316SeedMultiRegions => &[multi_region::V0316_END_MARKER],
+ ZetaFormat::V0318SeedMultiRegions => &[multi_region::V0318_END_MARKER],
+ ZetaFormat::V0317SeedMultiRegions => &[multi_region::V0317_END_MARKER],
}
}
@@ -288,7 +342,10 @@ pub fn excerpt_ranges_for_format(
| ZetaFormat::V0211SeedCoder
| ZetaFormat::v0226Hashline
| ZetaFormat::V0304SeedNoEdits
- | ZetaFormat::V0306SeedMultiRegions => (
+ | ZetaFormat::V0306SeedMultiRegions
+ | ZetaFormat::V0316SeedMultiRegions
+ | ZetaFormat::V0318SeedMultiRegions
+ | ZetaFormat::V0317SeedMultiRegions => (
ranges.editable_350.clone(),
ranges.editable_350_context_150.clone(),
),
@@ -371,6 +428,30 @@ pub fn write_cursor_excerpt_section_for_format(
cursor_offset,
));
}
+ ZetaFormat::V0316SeedMultiRegions => {
+ prompt.push_str(&build_v0316_cursor_prefix(
+ path,
+ context,
+ editable_range,
+ cursor_offset,
+ ));
+ }
+ ZetaFormat::V0318SeedMultiRegions => {
+ prompt.push_str(&build_v0318_cursor_prefix(
+ path,
+ context,
+ editable_range,
+ cursor_offset,
+ ));
+ }
+ ZetaFormat::V0317SeedMultiRegions => {
+ prompt.push_str(&build_v0317_cursor_prefix(
+ path,
+ context,
+ editable_range,
+ cursor_offset,
+ ));
+ }
}
}
@@ -403,6 +484,87 @@ fn build_v0306_cursor_prefix(
section
}
+fn build_v0316_cursor_prefix(
+ path: &Path,
+ context: &str,
+ editable_range: &Range<usize>,
+ cursor_offset: usize,
+) -> String {
+ let mut section = String::new();
+ let path_str = path.to_string_lossy();
+ write!(section, "{}{}\n", seed_coder::FILE_MARKER, path_str).ok();
+
+ section.push_str(&context[..editable_range.start]);
+
+ let editable_text = &context[editable_range.clone()];
+ let cursor_in_editable = cursor_offset - editable_range.start;
+ multi_region::write_editable_with_markers_v0316(
+ &mut section,
+ editable_text,
+ cursor_in_editable,
+ CURSOR_MARKER,
+ );
+
+ if !section.ends_with('\n') {
+ section.push('\n');
+ }
+ section
+}
+
+fn build_v0318_cursor_prefix(
+ path: &Path,
+ context: &str,
+ editable_range: &Range<usize>,
+ cursor_offset: usize,
+) -> String {
+ let mut section = String::new();
+ let path_str = path.to_string_lossy();
+ write!(section, "{}{}\n", seed_coder::FILE_MARKER, path_str).ok();
+
+ section.push_str(&context[..editable_range.start]);
+
+ let editable_text = &context[editable_range.clone()];
+ let cursor_in_editable = cursor_offset - editable_range.start;
+ multi_region::write_editable_with_markers_v0318(
+ &mut section,
+ editable_text,
+ cursor_in_editable,
+ CURSOR_MARKER,
+ );
+
+ if !section.ends_with('\n') {
+ section.push('\n');
+ }
+ section
+}
+
+fn build_v0317_cursor_prefix(
+ path: &Path,
+ context: &str,
+ editable_range: &Range<usize>,
+ cursor_offset: usize,
+) -> String {
+ let mut section = String::new();
+ let path_str = path.to_string_lossy();
+ write!(section, "{}{}\n", seed_coder::FILE_MARKER, path_str).ok();
+
+ section.push_str(&context[..editable_range.start]);
+
+ let editable_text = &context[editable_range.clone()];
+ let cursor_in_editable = cursor_offset - editable_range.start;
+ multi_region::write_editable_with_markers_v0317(
+ &mut section,
+ editable_text,
+ cursor_in_editable,
+ CURSOR_MARKER,
+ );
+
+ if !section.ends_with('\n') {
+ section.push('\n');
+ }
+ section
+}
+
fn offset_range_to_row_range(text: &str, range: Range<usize>) -> Range<u32> {
let start_row = text[0..range.start].matches('\n').count() as u32;
let mut end_row = start_row + text[range.clone()].matches('\n').count() as u32;
@@ -439,7 +601,10 @@ pub fn format_prompt_with_budget_for_format(
let prompt = match format {
ZetaFormat::V0211SeedCoder
| ZetaFormat::V0304SeedNoEdits
- | ZetaFormat::V0306SeedMultiRegions => {
+ | ZetaFormat::V0306SeedMultiRegions
+ | ZetaFormat::V0316SeedMultiRegions
+ | ZetaFormat::V0318SeedMultiRegions
+ | ZetaFormat::V0317SeedMultiRegions => {
let mut cursor_section = String::new();
write_cursor_excerpt_section_for_format(
format,
@@ -450,13 +615,14 @@ pub fn format_prompt_with_budget_for_format(
cursor_offset,
);
+ let budget_with_margin = apply_prompt_budget_margin(max_tokens);
seed_coder::assemble_fim_prompt(
context,
&editable_range,
&cursor_section,
&input.events,
related_files,
- max_tokens,
+ budget_with_margin,
)
}
_ => {
@@ -470,24 +636,25 @@ pub fn format_prompt_with_budget_for_format(
cursor_offset,
);
+ let mut remaining_budget = apply_prompt_budget_margin(max_tokens);
let cursor_tokens = estimate_tokens(cursor_section.len());
- let budget_after_cursor = max_tokens.saturating_sub(cursor_tokens);
+ remaining_budget = remaining_budget.saturating_sub(cursor_tokens);
let edit_history_section = format_edit_history_within_budget(
&input.events,
"<|file_sep|>",
"edit history",
- budget_after_cursor,
+ remaining_budget,
max_edit_event_count_for_format(&format),
);
let edit_history_tokens = estimate_tokens(edit_history_section.len());
- let budget_after_edit_history = budget_after_cursor.saturating_sub(edit_history_tokens);
+ remaining_budget = remaining_budget.saturating_sub(edit_history_tokens);
let related_files_section = format_related_files_within_budget(
&related_files,
"<|file_sep|>",
"",
- budget_after_edit_history,
+ remaining_budget,
);
let mut prompt = String::new();
@@ -533,7 +700,10 @@ pub fn max_edit_event_count_for_format(format: &ZetaFormat) -> usize {
| ZetaFormat::v0226Hashline
| ZetaFormat::V0304SeedNoEdits
| ZetaFormat::V0304VariableEdit
- | ZetaFormat::V0306SeedMultiRegions => 6,
+ | ZetaFormat::V0306SeedMultiRegions
+ | ZetaFormat::V0316SeedMultiRegions
+ | ZetaFormat::V0318SeedMultiRegions
+ | ZetaFormat::V0317SeedMultiRegions => 6,
}
}
@@ -552,7 +722,11 @@ pub fn get_prefill_for_format(
| ZetaFormat::V0211SeedCoder
| ZetaFormat::v0226Hashline
| ZetaFormat::V0304VariableEdit => String::new(),
- ZetaFormat::V0304SeedNoEdits | ZetaFormat::V0306SeedMultiRegions => String::new(),
+ ZetaFormat::V0304SeedNoEdits
+ | ZetaFormat::V0306SeedMultiRegions
+ | ZetaFormat::V0316SeedMultiRegions
+ | ZetaFormat::V0318SeedMultiRegions
+ | ZetaFormat::V0317SeedMultiRegions => String::new(),
}
}
@@ -564,6 +738,9 @@ pub fn output_end_marker_for_format(format: ZetaFormat) -> Option<&'static str>
ZetaFormat::V0211SeedCoder
| ZetaFormat::V0304SeedNoEdits
| ZetaFormat::V0306SeedMultiRegions => Some(seed_coder::END_MARKER),
+ ZetaFormat::V0316SeedMultiRegions => Some(multi_region::V0316_END_MARKER),
+ ZetaFormat::V0318SeedMultiRegions => Some(multi_region::V0318_END_MARKER),
+ ZetaFormat::V0317SeedMultiRegions => Some(multi_region::V0317_END_MARKER),
ZetaFormat::V0112MiddleAtEnd
| ZetaFormat::V0113Ordered
| ZetaFormat::V0114180EditableRegion
@@ -591,6 +768,49 @@ pub fn encode_patch_as_output_for_format(
ZetaFormat::V0304SeedNoEdits | ZetaFormat::V0306SeedMultiRegions => {
Ok(seed_coder::no_edits(patch))
}
+ ZetaFormat::V0316SeedMultiRegions => {
+ let empty_patch = patch.lines().count() <= 3;
+ if empty_patch {
+ let marker_offsets = multi_region::compute_marker_offsets(old_editable_region);
+ let marker_num =
+ multi_region::nearest_marker_number(cursor_offset, &marker_offsets);
+ let tag = multi_region::marker_tag(marker_num);
+ Ok(Some(format!(
+ "{tag}{tag}{}",
+ multi_region::V0316_END_MARKER
+ )))
+ } else {
+ Ok(None)
+ }
+ }
+ ZetaFormat::V0318SeedMultiRegions => {
+ let empty_patch = patch.lines().count() <= 3;
+ if empty_patch {
+ let marker_offsets =
+ multi_region::compute_marker_offsets_v0318(old_editable_region);
+ let marker_num =
+ multi_region::nearest_marker_number(cursor_offset, &marker_offsets);
+ let tag = multi_region::marker_tag(marker_num);
+ Ok(Some(format!(
+ "{tag}{tag}{}",
+ multi_region::V0318_END_MARKER
+ )))
+ } else {
+ Ok(None)
+ }
+ }
+ ZetaFormat::V0317SeedMultiRegions => {
+ let empty_patch = patch.lines().count() <= 3;
+ if empty_patch {
+ let tag = multi_region::marker_tag_relative(0);
+ Ok(Some(format!(
+ "{tag}{tag}{}",
+ multi_region::V0317_END_MARKER
+ )))
+ } else {
+ Ok(None)
+ }
+ }
_ => Ok(None),
}
}
@@ -613,10 +833,11 @@ pub fn parse_zeta2_model_output(
None => output,
};
- let (context, editable_range_in_context, context_range, _) =
+ let (context, editable_range_in_context, context_range, cursor_offset) =
resolve_cursor_region(prompt_inputs, format);
let context_start = context_range.start;
let old_editable_region = &context[editable_range_in_context.clone()];
+ let cursor_offset_in_editable = cursor_offset.saturating_sub(editable_range_in_context.start);
let (range_in_context, output) = match format {
ZetaFormat::v0226Hashline => (
@@ -644,6 +865,22 @@ pub fn parse_zeta2_model_output(
multi_region::apply_marker_span(old_editable_region, output)?
},
),
+ ZetaFormat::V0316SeedMultiRegions => (
+ editable_range_in_context,
+ multi_region::apply_marker_span_v0316(old_editable_region, output)?,
+ ),
+ ZetaFormat::V0318SeedMultiRegions => (
+ editable_range_in_context,
+ multi_region::apply_marker_span_v0318(old_editable_region, output)?,
+ ),
+ ZetaFormat::V0317SeedMultiRegions => (
+ editable_range_in_context,
+ multi_region::apply_marker_span_v0317(
+ old_editable_region,
+ output,
+ Some(cursor_offset_in_editable),
+ )?,
+ ),
_ => (editable_range_in_context, output.to_string()),
};
@@ -4097,6 +4334,10 @@ mod tests {
format_prompt_with_budget_for_format(input, ZetaFormat::V0114180EditableRegion, max_tokens)
}
+ fn budget_with_margin(requested_tokens: usize) -> usize {
+ ((requested_tokens as f64) / 0.9).ceil() as usize
+ }
+
#[test]
fn test_no_truncation_when_within_budget() {
let input = make_input(
@@ -4167,7 +4408,7 @@ mod tests {
);
assert_eq!(
- format_with_budget(&input, 55),
+ format_with_budget(&input, budget_with_margin(55)),
Some(
indoc! {r#"
<|file_sep|>edit history
@@ -4238,7 +4479,7 @@ mod tests {
);
assert_eq!(
- format_with_budget(&input, 50).unwrap(),
+ format_with_budget(&input, budget_with_margin(50)).unwrap(),
indoc! {r#"
<|file_sep|>big.rs
first excerpt
@@ -4310,7 +4551,7 @@ mod tests {
// file_b header (7) + excerpt (7) = 14 tokens, which fits.
// file_a would need another 14 tokens, which doesn't fit.
assert_eq!(
- format_with_budget(&input, 52).unwrap(),
+ format_with_budget(&input, budget_with_margin(52)).unwrap(),
indoc! {r#"
<|file_sep|>file_b.rs
high priority content
@@ -4383,7 +4624,7 @@ mod tests {
// With tight budget, only order<=1 excerpts included (header + important fn).
assert_eq!(
- format_with_budget(&input, 55).unwrap(),
+ format_with_budget(&input, budget_with_margin(55)).unwrap(),
indoc! {r#"
<|file_sep|>mod.rs
mod header
@@ -4507,6 +4748,34 @@ mod tests {
);
}
+ #[test]
+ fn test_v0317_formats_prompt_with_many_related_files() {
+ let related_files = (0..900)
+ .map(|index| {
+ make_related_file(
+ &format!("related_{index}.rs"),
+ "fn helper() {\n let value = 1;\n}\n",
+ )
+ })
+ .collect();
+
+ let input = make_input(
+ "code",
+ 0..4,
+ 2,
+ vec![make_event("a.rs", "-x\n+y\n")],
+ related_files,
+ );
+
+ let prompt =
+ format_prompt_with_budget_for_format(&input, ZetaFormat::V0317SeedMultiRegions, 4096);
+
+ assert!(prompt.is_some());
+ let prompt = prompt.expect("v0317 should produce a prompt under high related-file count");
+ assert!(prompt.contains("test.rs"));
+ assert!(prompt.contains(CURSOR_MARKER));
+ }
+
#[test]
fn test_seed_coder_no_context() {
let input = make_input("before\nmiddle\nafter", 7..13, 10, vec![], vec![]);
@@ -1,580 +0,0 @@
-# Plan: Show ACP Threads in the Sidebar (Revised)
-
-## Problem
-
-The sidebar currently only shows **Zed-native agent threads** (from `ThreadStore`/`ThreadsDatabase`). ACP threads (Claude Code, Codex, Gemini, etc.) are invisible in the sidebar once they're no longer live.
-
-## Root Cause
-
-`ThreadStore` and `ThreadsDatabase` only persist metadata for native threads. When `rebuild_contents` populates the sidebar, it reads from `ThreadStore` for historical threads and overlays live info from the `AgentPanel` — but non-native threads never get written to `ThreadStore`, so once they stop being live, they disappear.
-
-## Solution Overview (Revised)
-
-**Key change from the original plan:** We completely remove the sidebar's dependency on `ThreadStore`. Instead, the `Sidebar` itself owns a **single, unified persistence layer** — a new `SidebarDb` domain stored in the workspace DB — that tracks metadata for _all_ thread types (native and ACP). The sidebar becomes the single source of truth for what threads appear in the list.
-
-### Why Remove the ThreadStore Dependency?
-
-1. **Single responsibility** — The sidebar is the only consumer of "which threads to show in the list." Having it depend on `ThreadStore` (which exists primarily for native agent save/load) creates an indirect coupling that makes ACP integration awkward.
-2. **No merge logic** — The original plan required merging native `ThreadStore` data with a separate `AcpThreadMetadataDb` in `ThreadStore::reload`. By moving all sidebar metadata into one place, there's nothing to merge.
-3. **Simpler data flow** — Writers (native agent, ACP connections) push metadata to the sidebar DB. The sidebar reads from one table. No cross-crate coordination needed.
-4. **ThreadStore stays focused** — `ThreadStore` continues to manage native thread blob storage (save/load message data) without being polluted with sidebar display concerns.
-
-### Architecture
-
-```
- ┌─────────────────────┐ ┌─────────────────────────┐
- │ NativeAgent │ │ ACP Connections │
- │ (on save_thread) │ │ (on create/update/list) │
- └──────────┬──────────┘ └──────────┬──────────────┘
- │ │
- │ save_sidebar_thread() │
- └──────────┬─────────────────┘
- ▼
- ┌───────────────────┐
- │ SidebarDb │
- │ (workspace DB) │
- │ sidebar_threads │
- └────────┬──────────┘
- │
- ▼
- ┌───────────────────┐
- │ Sidebar │
- │ rebuild_contents │
- └───────────────────┘
-```
-
----
-
-## Step 1: Create `SidebarDb` Domain in `sidebar.rs`
-
-**File:** `crates/agent_ui/src/sidebar.rs`
-
-Add a `SidebarDb` domain using `db::static_connection!`, co-located in the sidebar module (or a small `persistence` submodule within `sidebar.rs` if it helps organization, but keeping it in the same file is fine for now).
-
-### Schema
-
-```rust
-use db::{
- sqlez::{
- bindable::Column, domain::Domain, statement::Statement,
- thread_safe_connection::ThreadSafeConnection,
- },
- sqlez_macros::sql,
-};
-
-/// Lightweight metadata for any thread (native or ACP), enough to populate
-/// the sidebar list and route to the correct load path when clicked.
-#[derive(Debug, Clone)]
-pub struct SidebarThreadRow {
- pub session_id: acp::SessionId,
- /// `None` for native Zed threads, `Some("claude-code")` etc. for ACP agents.
- pub agent_name: Option<String>,
- pub title: SharedString,
- pub updated_at: DateTime<Utc>,
- pub created_at: Option<DateTime<Utc>>,
- pub folder_paths: PathList,
-}
-
-pub struct SidebarDb(ThreadSafeConnection);
-
-impl Domain for SidebarDb {
- const NAME: &str = stringify!(SidebarDb);
-
- const MIGRATIONS: &[&str] = &[sql!(
- CREATE TABLE IF NOT EXISTS sidebar_threads(
- session_id TEXT PRIMARY KEY,
- agent_name TEXT,
- title TEXT NOT NULL,
- updated_at TEXT NOT NULL,
- created_at TEXT,
- folder_paths TEXT,
- folder_paths_order TEXT
- ) STRICT;
- )];
-}
-
-db::static_connection!(SIDEBAR_DB, SidebarDb, []);
-```
-
-### CRUD Methods
-
-```rust
-impl SidebarDb {
- /// Upsert metadata for a thread (native or ACP).
- pub async fn save(&self, row: &SidebarThreadRow) -> Result<()> {
- let id = row.session_id.0.clone();
- let agent_name = row.agent_name.clone();
- let title = row.title.to_string();
- let updated_at = row.updated_at.to_rfc3339();
- let created_at = row.created_at.map(|dt| dt.to_rfc3339());
- let serialized = row.folder_paths.serialize();
- let (fp, fpo) = if row.folder_paths.is_empty() {
- (None, None)
- } else {
- (Some(serialized.paths), Some(serialized.order))
- };
-
- self.write(move |conn| {
- let mut stmt = Statement::prepare(
- conn,
- "INSERT INTO sidebar_threads(session_id, agent_name, title, updated_at, created_at, folder_paths, folder_paths_order)
- VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7)
- ON CONFLICT(session_id) DO UPDATE SET
- agent_name = excluded.agent_name,
- title = excluded.title,
- updated_at = excluded.updated_at,
- folder_paths = excluded.folder_paths,
- folder_paths_order = excluded.folder_paths_order",
- )?;
- let mut i = stmt.bind(&id, 1)?;
- i = stmt.bind(&agent_name, i)?;
- i = stmt.bind(&title, i)?;
- i = stmt.bind(&updated_at, i)?;
- i = stmt.bind(&created_at, i)?;
- i = stmt.bind(&fp, i)?;
- stmt.bind(&fpo, i)?;
- stmt.exec()
- })
- .await
- }
-
- /// List all sidebar thread metadata, ordered by updated_at descending.
- pub fn list(&self) -> Result<Vec<SidebarThreadRow>> {
- self.select::<SidebarThreadRow>(
- "SELECT session_id, agent_name, title, updated_at, created_at, folder_paths, folder_paths_order
- FROM sidebar_threads
- ORDER BY updated_at DESC"
- )?(())
- }
-
- /// List threads for a specific folder path set.
- pub fn list_for_paths(&self, paths: &PathList) -> Result<Vec<SidebarThreadRow>> {
- let serialized = paths.serialize();
- self.select_bound::<String, SidebarThreadRow>(sql!(
- SELECT session_id, agent_name, title, updated_at, created_at, folder_paths, folder_paths_order
- FROM sidebar_threads
- WHERE folder_paths = ?
- ORDER BY updated_at DESC
- ))?(serialized.paths)
- }
-
- /// Look up a single thread by session ID.
- pub fn get(&self, session_id: &acp::SessionId) -> Result<Option<SidebarThreadRow>> {
- let id = session_id.0.clone();
- self.select_row_bound::<Arc<str>, SidebarThreadRow>(sql!(
- SELECT session_id, agent_name, title, updated_at, created_at, folder_paths, folder_paths_order
- FROM sidebar_threads
- WHERE session_id = ?
- ))?(id)
- }
-
- /// Return the total number of rows in the table.
- pub fn count(&self) -> Result<usize> {
- let count: (i32, i32) = self.select_row(sql!(
- SELECT COUNT(*) FROM sidebar_threads
- ))?(())?.unwrap_or_default();
- Ok(count.0 as usize)
- }
-
- /// Delete metadata for a single thread.
- pub async fn delete(&self, session_id: acp::SessionId) -> Result<()> {
- let id = session_id.0;
- self.write(move |conn| {
- let mut stmt = Statement::prepare(
- conn,
- "DELETE FROM sidebar_threads WHERE session_id = ?",
- )?;
- stmt.bind(&id, 1)?;
- stmt.exec()
- })
- .await
- }
-
- /// Delete all thread metadata.
- pub async fn delete_all(&self) -> Result<()> {
- self.write(move |conn| {
- let mut stmt = Statement::prepare(
- conn,
- "DELETE FROM sidebar_threads",
- )?;
- stmt.exec()
- })
- .await
- }
-}
-```
-
-### `Column` Implementation
-
-```rust
-impl Column for SidebarThreadRow {
- fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
- let (id, next): (Arc<str>, i32) = Column::column(statement, start_index)?;
- let (agent_name, next): (Option<String>, i32) = Column::column(statement, next)?;
- let (title, next): (String, i32) = Column::column(statement, next)?;
- let (updated_at_str, next): (String, i32) = Column::column(statement, next)?;
- let (created_at_str, next): (Option<String>, i32) = Column::column(statement, next)?;
- let (folder_paths_str, next): (Option<String>, i32) = Column::column(statement, next)?;
- let (folder_paths_order_str, next): (Option<String>, i32) = Column::column(statement, next)?;
-
- let updated_at = DateTime::parse_from_rfc3339(&updated_at_str)?.with_timezone(&Utc);
- let created_at = created_at_str
- .as_deref()
- .map(DateTime::parse_from_rfc3339)
- .transpose()?
- .map(|dt| dt.with_timezone(&Utc));
-
- let folder_paths = folder_paths_str
- .map(|paths| {
- PathList::deserialize(&util::path_list::SerializedPathList {
- paths,
- order: folder_paths_order_str.unwrap_or_default(),
- })
- })
- .unwrap_or_default();
-
- Ok((
- SidebarThreadRow {
- session_id: acp::SessionId::new(id),
- agent_name,
- title: title.into(),
- updated_at,
- created_at,
- folder_paths,
- },
- next,
- ))
- }
-}
-```
-
-**Key points:**
-
-- `SIDEBAR_DB` is a `LazyLock` static — initialized on first use, no manual connection management.
-- The `agent_name` column is `NULL` for native Zed threads and a string like `"claude-code"` for ACP agents. This replaces the `agent_type` field from the original plan.
-- The DB file lives alongside other `static_connection!` databases.
-- `ThreadsDatabase` and `ThreadStore` are **completely unchanged** by this step.
-
----
-
-## Step 2: Replace `ThreadStore` Reads in `rebuild_contents` with `SidebarDb` Reads
-
-**File:** `crates/agent_ui/src/sidebar.rs`
-
-### Remove `ThreadStore` Dependency
-
-1. **Remove** `ThreadStore::global(cx)` and `ThreadStore::try_global(cx)` from `Sidebar::new` and `rebuild_contents`.
-2. **Remove** the `cx.observe_in(&thread_store, ...)` subscription that triggers `update_entries` when `ThreadStore` changes.
-3. **Replace** `thread_store.read(cx).threads_for_paths(&path_list)` calls with `SIDEBAR_DB.list_for_paths(&path_list)` (or read all rows once at the top of `rebuild_contents` and index them in memory, which is simpler and avoids repeated DB calls).
-
-### New Data Flow in `rebuild_contents`
-
-```rust
-fn rebuild_contents(&mut self, cx: &App) {
- // ... existing workspace iteration setup ...
-
- // Read ALL sidebar thread metadata once, index by folder_paths.
- let all_sidebar_threads = SIDEBAR_DB.list().unwrap_or_default();
- let mut threads_by_paths: HashMap<PathList, Vec<SidebarThreadRow>> = HashMap::new();
- for row in all_sidebar_threads {
- threads_by_paths
- .entry(row.folder_paths.clone())
- .or_default()
- .push(row);
- }
-
- for (ws_index, workspace) in workspaces.iter().enumerate() {
- // ... existing absorbed-workspace logic ...
-
- let path_list = workspace_path_list(workspace, cx);
-
- if should_load_threads {
- let mut seen_session_ids: HashSet<acp::SessionId> = HashSet::new();
-
- // Read from SidebarDb instead of ThreadStore
- if let Some(rows) = threads_by_paths.get(&path_list) {
- for row in rows {
- seen_session_ids.insert(row.session_id.clone());
- let (agent, icon) = match &row.agent_name {
- None => (Agent::NativeAgent, IconName::ZedAgent),
- Some(name) => (
- Agent::Custom { name: name.clone().into() },
- IconName::ZedAgent, // placeholder, resolved in Step 5
- ),
- };
- threads.push(ThreadEntry {
- agent,
- session_info: AgentSessionInfo {
- session_id: row.session_id.clone(),
- cwd: None,
- title: Some(row.title.clone()),
- updated_at: Some(row.updated_at),
- created_at: row.created_at,
- meta: None,
- },
- icon,
- icon_from_external_svg: None,
- status: AgentThreadStatus::default(),
- workspace: ThreadEntryWorkspace::Open(workspace.clone()),
- is_live: false,
- is_background: false,
- highlight_positions: Vec::new(),
- worktree_name: None,
- worktree_highlight_positions: Vec::new(),
- diff_stats: DiffStats::default(),
- });
- }
- }
-
- // ... existing linked git worktree logic, also reading from threads_by_paths ...
- // ... existing live thread overlay logic (unchanged) ...
- }
- }
-}
-```
-
-### What Changes
-
-- `rebuild_contents` reads from `SIDEBAR_DB` instead of `ThreadStore`.
-- The `ThreadEntry.agent` field now carries `Agent::Custom { name }` for ACP threads, enabling correct routing in `activate_thread`.
-- The live thread overlay logic (from `all_thread_infos_for_workspace`) is **unchanged** — it still reads from `AgentPanel` to get real-time status of running threads.
-
-### What Stays the Same
-
-- The entire workspace/absorbed-workspace/git-worktree structure.
-- The live thread overlay pass.
-- The notification tracking logic.
-- The search/filter logic.
-
----
-
-## Step 3: Write Native Thread Metadata to `SidebarDb`
-
-**File:** `crates/agent_ui/src/sidebar.rs` and/or `crates/agent_ui/src/agent_panel.rs`
-
-When a native thread is saved (after conversation, on title update, etc.), we also write its metadata to `SidebarDb`. There are two approaches:
-
-### Option A: Subscribe to `ThreadStore` Changes (Recommended)
-
-Keep a one-directional sync: when `ThreadStore` finishes a `save_thread` or `reload`, the sidebar syncs the metadata to `SidebarDb`. This can be done in the sidebar's workspace subscription or by observing `ThreadStore` changes purely for the purpose of syncing (not for reading).
-
-```rust
-// In Sidebar::subscribe_to_workspace or a dedicated sync method:
-fn sync_native_threads_to_sidebar_db(&self, cx: &App) {
- if let Some(thread_store) = ThreadStore::try_global(cx) {
- let entries: Vec<_> = thread_store.read(cx).entries().collect();
- cx.background_spawn(async move {
- for meta in entries {
- SIDEBAR_DB.save(&SidebarThreadRow {
- session_id: meta.id,
- agent_name: None, // native
- title: meta.title,
- updated_at: meta.updated_at,
- created_at: meta.created_at,
- folder_paths: meta.folder_paths,
- }).await.log_err();
- }
- }).detach();
- }
-}
-```
-
-### Option B: Write at the Point of Save
-
-In `AgentPanel` or wherever `thread_store.save_thread()` is called, also call `SIDEBAR_DB.save(...)`. This is more direct but requires touching more call sites.
-
-**Recommendation:** Option A is simpler for the initial implementation. We observe `ThreadStore` changes, diff against `SidebarDb`, and sync. Later, if we want to remove `ThreadStore` entirely from the write path for native threads, we can switch to Option B.
-
----
-
-## Step 4: Write ACP Thread Metadata to `SidebarDb`
-
-**File:** `crates/agent_ui/src/connection_view.rs` (or `agent_panel.rs`)
-
-When ACP sessions are created, updated, or listed, write metadata directly to `SidebarDb`:
-
-- **On new session creation:** After `connection.new_session()` returns the `AcpThread`, call `SIDEBAR_DB.save(...)`.
-- **On title update:** ACP threads receive title updates via `SessionInfoUpdate`. When these come in, call `SIDEBAR_DB.save(...)` with the new title and updated timestamp.
-- **On session list refresh:** When `AgentSessionList::list_sessions` returns for an ACP agent, bulk-sync the metadata into `SidebarDb`.
-
-After any write, call `cx.notify()` on the `Sidebar` entity (or use a channel/event) to trigger a `rebuild_contents`.
-
-### Triggering Sidebar Refresh
-
-Since the sidebar no longer observes `ThreadStore`, we need a mechanism to trigger `rebuild_contents` after DB writes. Options:
-
-1. **Emit an event from `AgentPanel`** — The sidebar already subscribes to `AgentPanelEvent`. Add a new variant like `AgentPanelEvent::ThreadMetadataChanged` and emit it after saving to `SidebarDb`.
-2. **Use `cx.notify()` directly** — If the save happens within a `Sidebar` method, just call `self.update_entries(cx)`.
-3. **Observe a lightweight signal entity** — A simple `Entity<()>` that gets notified after DB writes.
-
-**Recommendation:** Option 1 (emit from `AgentPanel`) is cleanest since the sidebar already subscribes to panel events.
-
----
-
-## Step 5: Handle Agent Icon Resolution for ACP Threads
-
-**File:** `crates/agent_ui/src/sidebar.rs`
-
-For ACP threads in the sidebar, we need the correct agent icon. The `agent_name` string stored in `SidebarDb` maps to an agent in the `AgentServerStore`, which has icon info.
-
-In `rebuild_contents`, after building the initial thread list from `SidebarDb`, resolve icons for ACP threads:
-
-```rust
-// For ACP threads, look up the icon from the agent server store
-if let Some(name) = &row.agent_name {
- if let Some(agent_server_store) = /* get from workspace */ {
- // resolve icon from agent_server_store using name
- }
-}
-```
-
----
-
-## Step 6: Handle Delete Operations Correctly
-
-**File:** `crates/agent_ui/src/sidebar.rs`
-
-When the user deletes a thread from the sidebar:
-
-- **All threads** → Delete from `SidebarDb` via `SIDEBAR_DB.delete(session_id)`.
-- **Native threads** → _Also_ delete from `ThreadStore`/`ThreadsDatabase` (to clean up the blob data).
-- **ACP threads** → Optionally notify the ACP server via `AgentSessionList::delete_session`.
-
-The `agent_name` field on `SidebarThreadRow` (or the `Agent` enum on `ThreadEntry`) tells us which path to take.
-
-When the user clears all history:
-
-```rust
-// Delete all sidebar metadata
-SIDEBAR_DB.delete_all().await?;
-// Also clear native thread blobs
-thread_store.delete_threads(cx);
-// Optionally notify ACP servers
-```
-
----
-
-## Step 7: Handle `activate_thread` Routing
-
-**File:** `crates/agent_ui/src/sidebar.rs`, `crates/agent_ui/src/agent_panel.rs`
-
-In `activate_thread`, branch on the `Agent` variant:
-
-- `Agent::NativeAgent` → Call `panel.load_agent_thread(Agent::NativeAgent, session_id, ...)` (current behavior).
-- `Agent::Custom { name }` → Call `panel.load_agent_thread(Agent::Custom { name }, session_id, ...)` so it routes to the correct `AgentConnection::load_session`.
-
-This is already partially set up — `activate_thread` takes an `Agent` parameter. The key change is that `ThreadEntry` now carries the correct `Agent` variant based on `SidebarThreadRow.agent_name`.
-
----
-
-## Step 8: Handle `activate_archived_thread` Without ThreadStore
-
-**File:** `crates/agent_ui/src/sidebar.rs`
-
-Currently, `activate_archived_thread` looks up `saved_path_list` from `ThreadStore`:
-
-```rust
-let saved_path_list = ThreadStore::try_global(cx).and_then(|thread_store| {
- thread_store
- .read(cx)
- .thread_from_session_id(&session_info.session_id)
- .map(|thread| thread.folder_paths.clone())
-});
-```
-
-Replace this with a targeted `SidebarDb::get` lookup (single-row SELECT, no full table scan):
-
-```rust
-let saved_path_list = SIDEBAR_DB
- .get(&session_info.session_id)
- .ok()
- .flatten()
- .map(|row| row.folder_paths);
-```
-
----
-
-## Step 9: Error Handling for Offline Agents
-
-When an ACP thread is clicked but the agent server is not running:
-
-- Show a toast/notification explaining the agent is offline.
-- Keep the metadata in the sidebar (don't remove it).
-- Optionally offer to start the agent server.
-
----
-
-## Step 10: Migration — Backfill Existing Native Threads
-
-On first launch after this change, the `SidebarDb` will be empty while `ThreadsDatabase` has existing native threads. We need a one-time backfill:
-
-```rust
-// In Sidebar::new or a dedicated init method:
-fn backfill_native_threads_if_needed(cx: &App) {
- if SIDEBAR_DB.count() > 0 {
- return; // Already populated
- }
-
- if let Some(thread_store) = ThreadStore::try_global(cx) {
- let entries: Vec<_> = thread_store.read(cx).entries().collect();
- cx.background_spawn(async move {
- for meta in entries {
- SIDEBAR_DB.save(&SidebarThreadRow {
- session_id: meta.id,
- agent_name: None,
- title: meta.title,
- updated_at: meta.updated_at,
- created_at: meta.created_at,
- folder_paths: meta.folder_paths,
- }).await.log_err();
- }
- }).detach();
- }
-}
-```
-
----
-
-## Summary of Files to Change
-
-| File | Changes |
-| ---------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
-| `crates/agent_ui/Cargo.toml` | Add `db.workspace = true`, `sqlez.workspace = true`, `sqlez_macros.workspace = true`, `chrono.workspace = true` dependencies |
-| `crates/agent_ui/src/sidebar.rs` | **Main changes.** Add `SidebarDb` domain + `SIDEBAR_DB` static + `SidebarThreadRow`. Replace all `ThreadStore` reads in `rebuild_contents` with `SidebarDb` reads. Update `activate_archived_thread`. Add native thread sync logic. Add backfill on first run. |
-| `crates/agent_ui/src/agent_panel.rs` | Emit `AgentPanelEvent::ThreadMetadataChanged` after thread saves. Potentially write ACP metadata to `SidebarDb` here. |
-| `crates/agent_ui/src/connection_view.rs` | Write ACP metadata to `SidebarDb` on session creation, title updates, and session list refreshes. |
-
-## What Is NOT Changed
-
-| File / Area | Why |
-| ------------------------------------------ | ---------------------------------------------------------------------------------------------------------------------------- |
-| `threads` table schema | No migration needed — native blob persistence is completely untouched |
-| `ThreadsDatabase` methods | `save_thread_sync`, `load_thread`, `list_threads`, `delete_thread`, `delete_threads` — all unchanged |
-| `ThreadStore` struct/methods | Stays exactly as-is. It's still used for native thread blob save/load. The sidebar just no longer reads from it for display. |
-| `NativeAgent::load_thread` / `open_thread` | These deserialize `DbThread` blobs — completely unaffected |
-| `crates/acp_thread/` | No new persistence module needed there (unlike the original plan) |
-| `crates/agent/src/db.rs` | `DbThreadMetadata` is unchanged — no `agent_type` field added |
-
-## Execution Order
-
-1. **SidebarDb domain** (Step 1) — Create `SidebarDb`, `SidebarThreadRow`, `SIDEBAR_DB` static, CRUD methods in `sidebar.rs`.
-2. **Replace reads** (Step 2) — Swap `ThreadStore` reads in `rebuild_contents` for `SidebarDb` reads.
-3. **Native write path** (Step 3) — Sync native thread metadata from `ThreadStore` into `SidebarDb`.
-4. **ACP write path** (Step 4) — Write ACP thread metadata to `SidebarDb` from connection views.
-5. **Icon resolution** (Step 5) — Resolve ACP agent icons in the sidebar.
-6. **Delete path** (Step 6) — Route deletes to `SidebarDb` + native blob cleanup + ACP server notification.
-7. **Activate routing** (Step 7) — Ensure `activate_thread` routes correctly based on `Agent` variant.
-8. **Archive fix** (Step 8) — Update `activate_archived_thread` to use `SidebarDb`.
-9. **Migration** (Step 10) — Backfill existing native threads on first run.
-10. **Polish** (Step 9) — Error handling for offline agents.
-
-## Key Differences from Original Plan
-
-| Aspect | Original Plan | Revised Plan |
-| ------------------------------------ | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------- |
-| **Where ACP metadata lives** | New `AcpThreadMetadataDb` in `crates/acp_thread/` | `SidebarDb` in `crates/agent_ui/src/sidebar.rs` |
-| **Where sidebar reads from** | `ThreadStore` (which merges native + ACP) | `SidebarDb` directly (single source) |
-| **ThreadStore changes** | Added `agent_type` to `DbThreadMetadata`, merge logic in `reload`, new save/delete methods | **None** — ThreadStore is untouched |
-| **`crates/agent/src/db.rs` changes** | Added `agent_type: Option<String>` to `DbThreadMetadata` | **None** |
-| **Merge complexity** | Two data sources merged in `ThreadStore::reload` | No merge — one table, one read |
-| **Crate dependencies** | `acp_thread` gains `db` dependency | `agent_ui` gains `db` dependency (more natural — it's a UI persistence concern) |
@@ -5097,7 +5097,8 @@ See the [debugger page](../debugger.md) for more information about debugging sup
"collapse_untracked_diff": false,
"scrollbar": {
"show": null
- }
+ },
+ "starts_open": false
}
}
```
@@ -5112,6 +5113,7 @@ See the [debugger page](../debugger.md) for more information about debugging sup
- `sort_by_path`: Whether to sort entries in the panel by path or by status (the default)
- `collapse_untracked_diff`: Whether to collapse untracked files in the diff panel
- `scrollbar`: When to show the scrollbar in the git panel
+- `starts_open`: Whether the git panel should open on startup
## Git Worktree Directory
@@ -50,7 +50,12 @@ Zed supports ways to spawn (and rerun) commands using its integrated [terminal](
// Whether to show the task line in the output of the spawned task, defaults to `true`.
"show_summary": true,
// Whether to show the command line in the output of the spawned task, defaults to `true`.
- "show_command": true
+ "show_command": true,
+ // Which edited buffers to save before running the task:
+ // * `all` — save all edited buffers
+ // * `current` — save current buffer only
+ // * `none` — don't save any buffers
+ "save": "all"
// Represents the tags for inline runnable indicators, or spawning multiple tasks at once.
// "tags": []
}
@@ -8,56 +8,10 @@ If you are looking for the Zed extension registry, see the [`zed-industries/exte
Currently, Zed includes support for a number of languages without requiring installing an extension. Those languages can be found under [`crates/languages/src`](https://github.com/zed-industries/zed/tree/main/crates/languages/src).
-Support for all other languages is done via extensions. This directory ([extensions/](https://github.com/zed-industries/zed/tree/main/extensions/)) contains a number of officially maintained extensions. These extensions use the same [zed_extension_api](https://docs.rs/zed_extension_api/latest/zed_extension_api/) available to all [Zed Extensions](https://zed.dev/extensions) for providing [language servers](https://zed.dev/docs/extensions/languages#language-servers), [tree-sitter grammars](https://zed.dev/docs/extensions/languages#grammar) and [tree-sitter queries](https://zed.dev/docs/extensions/languages#tree-sitter-queries).
+Support for all other languages is done via extensions. This directory ([extensions/](https://github.com/zed-industries/zed/tree/main/extensions/)) contains some of the officially maintained extensions. These extensions use the same [zed_extension_api](https://docs.rs/zed_extension_api/latest/zed_extension_api/) available to all [Zed Extensions](https://zed.dev/extensions) for providing [language servers](https://zed.dev/docs/extensions/languages#language-servers), [tree-sitter grammars](https://zed.dev/docs/extensions/languages#grammar) and [tree-sitter queries](https://zed.dev/docs/extensions/languages#tree-sitter-queries).
+
+You can find the other officially maintained extensions in the [zed-extensions organization](https://github.com/zed-extensions).
## Dev Extensions
See the docs for [Developing an Extension Locally](https://zed.dev/docs/extensions/developing-extensions#developing-an-extension-locally) for how to work with one of these extensions.
-
-## Updating
-
-> [!NOTE]
-> This update process is usually handled by Zed staff.
-> Community contributors should just submit a PR (step 1) and we'll take it from there.
-
-The process for updating an extension in this directory has three parts.
-
-1. Create a PR with your changes. (Merge it)
-2. Bump the extension version in:
-
- - extensions/{language_name}/extension.toml
- - extensions/{language_name}/Cargo.toml
- - Cargo.lock
-
- You can do this manually, or with a script:
-
- ```sh
- # Output the current version for a given language
- ./script/language-extension-version <langname>
-
- # Update the version in `extension.toml` and `Cargo.toml` and trigger a `cargo check`
- ./script/language-extension-version <langname> <new_version>
- ```
-
- Commit your changes to a branch, push a PR and merge it.
-
-3. Open a PR to [`zed-industries/extensions`](https://github.com/zed-industries/extensions) repo that updates the extension in question
-
-Edit [`extensions.toml`](https://github.com/zed-industries/extensions/blob/main/extensions.toml) in the extensions repo to reflect the new version you set above and update the submodule latest Zed commit.
-
-```sh
-# Go into your clone of the extensions repo
-cd ../extensions
-
-# Update
-git checkout main
-git pull
-just init-submodule extensions/zed
-
-# Update the Zed submodule
-cd extensions/zed
-git checkout main
-git pull
-cd -
-git add extensions.toml extensions/zed
-```
@@ -77,7 +77,6 @@ let
builtins.elem firstComp topLevelIncludes;
craneLib = crane.overrideToolchain rustToolchain;
- gpu-lib = if withGLES then libglvnd else vulkan-loader;
commonArgs =
let
zedCargoLock = builtins.fromTOML (builtins.readFile ../crates/zed/Cargo.toml);
@@ -179,7 +178,8 @@ let
libva
libxkbcommon
wayland
- gpu-lib
+ libglvnd
+ vulkan-loader
xorg.libX11
xorg.libxcb
libdrm
@@ -236,7 +236,8 @@ let
# about them that's special is that they're manually dlopened at runtime
NIX_LDFLAGS = lib.optionalString stdenv'.hostPlatform.isLinux "-rpath ${
lib.makeLibraryPath [
- gpu-lib
+ libglvnd
+ vulkan-loader
wayland
libva
]
@@ -245,7 +246,7 @@ let
NIX_OUTPATH_USED_AS_RANDOM_SEED = "norebuilds";
};
- # prevent nix from removing the "unused" wayland/gpu-lib rpaths
+ # prevent nix from removing the "unused" wayland rpaths
dontPatchELF = stdenv'.hostPlatform.isLinux;
# TODO: try craneLib.cargoNextest separate output
@@ -1,29 +0,0 @@
-#!/usr/bin/env bash
-
-set -euox pipefail
-
-if [ "$#" -lt 1 ]; then
- echo "Usage: $0 <language> [version]"
- exit 1
-fi
-
-LANGUAGE=$1
-VERSION=${2:-}
-
-EXTENSION_DIR="extensions/$LANGUAGE"
-EXTENSION_TOML="$EXTENSION_DIR/extension.toml"
-CARGO_TOML="$EXTENSION_DIR/Cargo.toml"
-
-if [ ! -d "$EXTENSION_DIR" ]; then
- echo "Directory $EXTENSION_DIR does not exist."
- exit 1
-fi
-
-if [ -z "$VERSION" ]; then
- grep -m 1 'version =' "$EXTENSION_TOML" | awk -F\" '{print $2}'
- exit 0
-fi
-
-sed -i '' -e "s/^version = \".*\"/version = \"$VERSION\"/" "$EXTENSION_TOML"
-sed -i '' -e "s/^version = \".*\"/version = \"$VERSION\"/" "$CARGO_TOML"
-cargo update --workspace
@@ -50,6 +50,8 @@ if [[ -n $apt ]]; then
musl-tools
musl-dev
build-essential
+ pipewire
+ xdg-desktop-portal
)
if (grep -qP 'PRETTY_NAME="(Debian|Raspbian).+13' /etc/os-release); then
# libstdc++-14-dev is in build-essential
@@ -110,6 +112,8 @@ if [[ -n $dnf ]] || [[ -n $yum ]]; then
libzstd-devel
vulkan-loader
sqlite-devel
+ pipewire
+ xdg-desktop-portal
jq
git
tar
@@ -185,6 +189,8 @@ if [[ -n $zyp ]]; then
tar
wayland-devel
xcb-util-devel
+ pipewire
+ xdg-desktop-portal
)
$maysudo "$zyp" install -y "${deps[@]}"
finalize
@@ -213,6 +219,8 @@ if [[ -n $pacman ]]; then
pkgconf
mold
sqlite
+ pipewire
+ xdg-desktop-portal
jq
git
)
@@ -244,6 +252,8 @@ if [[ -n $xbps ]]; then
vulkan-loader
mold
sqlite-devel
+ pipewire
+ xdg-desktop-portal
)
$maysudo "$xbps" -Syu "${deps[@]}"
finalize
@@ -269,6 +279,8 @@ if [[ -n $emerge ]]; then
x11-libs/libxkbcommon
sys-devel/mold
dev-db/sqlite
+ media-video/pipewire
+ sys-apps/xdg-desktop-portal
)
$maysudo "$emerge" -u "${deps[@]}"
finalize
@@ -33,7 +33,7 @@ fn style() -> NamedJob {
.add_step(steps::cache_rust_dependencies_namespace())
.map(steps::install_linux_dependencies)
.add_step(steps::cargo_fmt())
- .add_step(steps::clippy(Platform::Linux)),
+ .add_step(steps::clippy(Platform::Linux, None)),
))
}
@@ -25,6 +25,8 @@ pub(crate) fn extension_auto_bump() -> Workflow {
Push::default()
.add_branch("main")
.add_path("extensions/**")
+ .add_path("!extensions/slash-commands-example/**")
+ .add_path("!extensions/test-extension/**")
.add_path("!extensions/workflows/**")
.add_path("!extensions/*.md"),
),
@@ -433,9 +433,9 @@ fn release_action(
generated_token: &StepOutput,
) -> (Step<Use>, StepOutput) {
let step = named::uses(
- "zed-extensions",
- "update-action",
- "72da482880c2f32ec8aa6e0a0427ab92d52ae32d",
+ "huacnlee",
+ "zed-extension-action",
+ "82920ff0876879f65ffbcfa3403589114a8919c6",
)
.id("extension-update")
.add_with(("extension-name", extension_id.to_string()))
@@ -483,12 +483,22 @@ fn enable_automerge_if_staff(
return;
}
+ // Assign staff member responsible for the bump
+ const pullNumber = parseInt(prNumber);
+
+ await github.rest.issues.addAssignees({
+ owner: 'zed-industries',
+ repo: 'extensions',
+ issue_number: pullNumber,
+ assignees: [author]
+ });
+ console.log(`Assigned ${author} to PR #${prNumber} in zed-industries/extensions`);
// Get the GraphQL node ID
const { data: pr } = await github.rest.pulls.get({
owner: 'zed-industries',
repo: 'extensions',
- pull_number: parseInt(prNumber)
+ pull_number: pullNumber
});
await github.graphql(`
@@ -16,9 +16,9 @@ pub(crate) fn release() -> Workflow {
let macos_tests = run_tests::run_platform_tests_no_filter(Platform::Mac);
let linux_tests = run_tests::run_platform_tests_no_filter(Platform::Linux);
let windows_tests = run_tests::run_platform_tests_no_filter(Platform::Windows);
- let macos_clippy = run_tests::clippy(Platform::Mac);
- let linux_clippy = run_tests::clippy(Platform::Linux);
- let windows_clippy = run_tests::clippy(Platform::Windows);
+ let macos_clippy = run_tests::clippy(Platform::Mac, None);
+ let linux_clippy = run_tests::clippy(Platform::Linux, None);
+ let windows_clippy = run_tests::clippy(Platform::Windows, None);
let check_scripts = run_tests::check_scripts();
let create_draft_release = create_draft_release();
@@ -18,7 +18,7 @@ pub fn release_nightly() -> Workflow {
let style = check_style();
// run only on windows as that's our fastest platform right now.
let tests = run_platform_tests_no_filter(Platform::Windows);
- let clippy_job = clippy(Platform::Windows);
+ let clippy_job = clippy(Platform::Windows, None);
let nightly = Some(ReleaseChannel::Nightly);
let bundle = ReleaseBundleJobs {
@@ -15,7 +15,7 @@ use crate::tasks::workflows::{
};
use super::{
- runners::{self, Platform},
+ runners::{self, Arch, Platform},
steps::{self, FluentBuilder, NamedJob, named, release_job},
};
@@ -50,13 +50,16 @@ pub(crate) fn run_tests() -> Workflow {
check_style(),
should_run_tests
.and_not_in_merge_queue()
- .guard(clippy(Platform::Windows)),
+ .guard(clippy(Platform::Windows, None)),
should_run_tests
.and_not_in_merge_queue()
- .guard(clippy(Platform::Linux)),
+ .guard(clippy(Platform::Linux, None)),
should_run_tests
.and_not_in_merge_queue()
- .guard(clippy(Platform::Mac)),
+ .guard(clippy(Platform::Mac, None)),
+ should_run_tests
+ .and_not_in_merge_queue()
+ .guard(clippy(Platform::Mac, Some(Arch::X86_64))),
should_run_tests
.and_not_in_merge_queue()
.guard(run_platform_tests(Platform::Windows)),
@@ -508,7 +511,12 @@ fn check_workspace_binaries() -> NamedJob {
))
}
-pub(crate) fn clippy(platform: Platform) -> NamedJob {
+pub(crate) fn clippy(platform: Platform, arch: Option<Arch>) -> NamedJob {
+ let target = arch.map(|arch| match (platform, arch) {
+ (Platform::Mac, Arch::X86_64) => "x86_64-apple-darwin",
+ (Platform::Mac, Arch::AARCH64) => "aarch64-apple-darwin",
+ _ => unimplemented!("cross-arch clippy not supported for {platform}/{arch}"),
+ });
let runner = match platform {
Platform::Windows => runners::WINDOWS_DEFAULT,
Platform::Linux => runners::LINUX_DEFAULT,
@@ -526,16 +534,20 @@ pub(crate) fn clippy(platform: Platform) -> NamedJob {
platform == Platform::Linux,
steps::install_linux_dependencies,
)
+ .when_some(target, |this, target| {
+ this.add_step(steps::install_rustup_target(target))
+ })
.add_step(steps::setup_sccache(platform))
- .add_step(steps::clippy(platform))
+ .add_step(steps::clippy(platform, target))
.add_step(steps::show_sccache_stats(platform));
if platform == Platform::Linux {
job = use_clang(job);
}
- NamedJob {
- name: format!("clippy_{platform}"),
- job,
- }
+ let name = match arch {
+ Some(arch) => format!("clippy_{platform}_{arch}"),
+ None => format!("clippy_{platform}"),
+ };
+ NamedJob { name, job }
}
pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
@@ -211,13 +211,20 @@ pub fn clear_target_dir_if_large(platform: Platform) -> Step<Run> {
}
}
-pub fn clippy(platform: Platform) -> Step<Run> {
+pub fn clippy(platform: Platform, target: Option<&str>) -> Step<Run> {
match platform {
Platform::Windows => named::pwsh("./script/clippy.ps1"),
- _ => named::bash("./script/clippy"),
+ _ => match target {
+ Some(target) => named::bash(format!("./script/clippy --target {target}")),
+ None => named::bash("./script/clippy"),
+ },
}
}
+pub fn install_rustup_target(target: &str) -> Step<Run> {
+ named::bash(format!("rustup target add {target}"))
+}
+
pub fn cache_rust_dependencies_namespace() -> Step<Use> {
named::uses("namespacelabs", "nscloud-cache-action", "v1")
.add_with(("cache", "rust"))