diff --git a/.config/hakari.toml b/.config/hakari.toml index 2050065cc2d6be2a27ec012dcd125af992793eeb..8ce0b77490482ab5ff2d781fb78fd86b56959a6a 100644 --- a/.config/hakari.toml +++ b/.config/hakari.toml @@ -25,6 +25,8 @@ third-party = [ { name = "reqwest", version = "0.11.27" }, # build of remote_server should not include scap / its x11 dependency { name = "scap", git = "https://github.com/zed-industries/scap", rev = "808aa5c45b41e8f44729d02e38fd00a2fe2722e7" }, + # build of remote_server should not need to depend on libalsa through rodio + { name = "rodio" }, ] [final-excludes] @@ -32,7 +34,6 @@ workspace-members = [ "zed_extension_api", # exclude all extensions - "zed_emmet", "zed_glsl", "zed_html", "zed_proto", diff --git a/.github/ISSUE_TEMPLATE/07_bug_windows_alpha.yml b/.github/ISSUE_TEMPLATE/07_bug_windows_alpha.yml new file mode 100644 index 0000000000000000000000000000000000000000..826c2b8027144d4b658108e09c79e40490c3005d --- /dev/null +++ b/.github/ISSUE_TEMPLATE/07_bug_windows_alpha.yml @@ -0,0 +1,35 @@ +name: Bug Report (Windows Alpha) +description: Zed Windows Alpha Related Bugs +type: "Bug" +labels: ["windows"] +title: "Windows Alpha: " +body: + - type: textarea + attributes: + label: Summary + description: Describe the bug with a one-line summary, and provide detailed reproduction steps + value: | + + SUMMARY_SENTENCE_HERE + + ### Description + + Steps to trigger the problem: + 1. + 2. + 3. + + **Expected Behavior**: + **Actual Behavior**: + + validations: + required: true + - type: textarea + id: environment + attributes: + label: Zed Version and System Specs + description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"' + placeholder: | + Output of "zed: copy system specs into clipboard" + validations: + required: true diff --git a/.github/actionlint.yml b/.github/actionlint.yml index ad0954590267f6c4d0450f7fe8ce3ccf21409560..0ee6af8a1d38e005f66b79f6c548d9f79396ea35 100644 --- a/.github/actionlint.yml +++ b/.github/actionlint.yml @@ -24,6 +24,9 @@ self-hosted-runner: - namespace-profile-8x16-ubuntu-2204 - namespace-profile-16x32-ubuntu-2204 - namespace-profile-32x64-ubuntu-2204 + # Namespace Limited Preview + - namespace-profile-8x16-ubuntu-2004-arm-m4 + - namespace-profile-8x32-ubuntu-2004-arm-m4 # Self Hosted Runners - self-mini-macos - self-32vcpu-windows-2022 diff --git a/.github/actions/run_tests_windows/action.yml b/.github/actions/run_tests_windows/action.yml index cbe95e82c1e76894427f270162418ed230c53e43..0a550c7d32b823db22cf205cf991020182a7d3b5 100644 --- a/.github/actions/run_tests_windows/action.yml +++ b/.github/actions/run_tests_windows/action.yml @@ -20,7 +20,167 @@ runs: with: node-version: "18" + - name: Configure crash dumps + shell: powershell + run: | + # Record the start time for this CI run + $runStartTime = Get-Date + $runStartTimeStr = $runStartTime.ToString("yyyy-MM-dd HH:mm:ss") + Write-Host "CI run started at: $runStartTimeStr" + + # Save the timestamp for later use + echo "CI_RUN_START_TIME=$($runStartTime.Ticks)" >> $env:GITHUB_ENV + + # Create crash dump directory in workspace (non-persistent) + $dumpPath = "$env:GITHUB_WORKSPACE\crash_dumps" + New-Item -ItemType Directory -Force -Path $dumpPath | Out-Null + + Write-Host "Setting up crash dump detection..."
+ Write-Host "Workspace dump path: $dumpPath" + + # Note: We're NOT modifying registry on stateful runners + # Instead, we'll check default Windows crash locations after tests + - name: Run tests shell: powershell working-directory: ${{ inputs.working-directory }} - run: cargo nextest run --workspace --no-fail-fast + run: | + $env:RUST_BACKTRACE = "full" + + # Enable Windows debugging features + $env:_NT_SYMBOL_PATH = "srv*https://msdl.microsoft.com/download/symbols" + + # .NET crash dump environment variables (ephemeral) + $env:COMPlus_DbgEnableMiniDump = "1" + $env:COMPlus_DbgMiniDumpType = "4" + $env:COMPlus_CreateDumpDiagnostics = "1" + + cargo nextest run --workspace --no-fail-fast + + - name: Analyze crash dumps + if: always() + shell: powershell + run: | + Write-Host "Checking for crash dumps..." + + # Get the CI run start time from the environment + $runStartTime = [DateTime]::new([long]$env:CI_RUN_START_TIME) + Write-Host "Only analyzing dumps created after: $($runStartTime.ToString('yyyy-MM-dd HH:mm:ss'))" + + # Check all possible crash dump locations + $searchPaths = @( + "$env:GITHUB_WORKSPACE\crash_dumps", + "$env:LOCALAPPDATA\CrashDumps", + "$env:TEMP", + "$env:GITHUB_WORKSPACE", + "$env:USERPROFILE\AppData\Local\CrashDumps", + "C:\Windows\System32\config\systemprofile\AppData\Local\CrashDumps" + ) + + $dumps = @() + foreach ($path in $searchPaths) { + if (Test-Path $path) { + Write-Host "Searching in: $path" + $found = Get-ChildItem "$path\*.dmp" -ErrorAction SilentlyContinue | Where-Object { + $_.CreationTime -gt $runStartTime + } + if ($found) { + $dumps += $found + Write-Host " Found $($found.Count) dump(s) from this CI run" + } + } + } + + if ($dumps) { + Write-Host "Found $($dumps.Count) crash dump(s)" + + # Install debugging tools if not present + $cdbPath = "C:\Program Files (x86)\Windows Kits\10\Debuggers\x64\cdb.exe" + if (-not (Test-Path $cdbPath)) { + Write-Host "Installing Windows Debugging Tools..." 
+ $url = "https://go.microsoft.com/fwlink/?linkid=2237387" + Invoke-WebRequest -Uri $url -OutFile winsdksetup.exe + Start-Process -Wait winsdksetup.exe -ArgumentList "/features OptionId.WindowsDesktopDebuggers /quiet" + } + + foreach ($dump in $dumps) { + Write-Host "`n==================================" + Write-Host "Analyzing crash dump: $($dump.Name)" + Write-Host "Size: $([math]::Round($dump.Length / 1MB, 2)) MB" + Write-Host "Time: $($dump.CreationTime)" + Write-Host "==================================" + + # Set symbol path + $env:_NT_SYMBOL_PATH = "srv*C:\symbols*https://msdl.microsoft.com/download/symbols" + + # Run analysis + $analysisOutput = & $cdbPath -z $dump.FullName -c "!analyze -v; ~*k; lm; q" 2>&1 | Out-String + + # Extract key information + if ($analysisOutput -match "ExceptionCode:\s*([\w]+)") { + Write-Host "Exception Code: $($Matches[1])" + if ($Matches[1] -eq "c0000005") { + Write-Host "Exception Type: ACCESS VIOLATION" + } + } + + if ($analysisOutput -match "EXCEPTION_RECORD:\s*(.+)") { + Write-Host "Exception Record: $($Matches[1])" + } + + if ($analysisOutput -match "FAULTING_IP:\s*\n(.+)") { + Write-Host "Faulting Instruction: $($Matches[1])" + } + + # Save full analysis + $analysisFile = "$($dump.FullName).analysis.txt" + $analysisOutput | Out-File -FilePath $analysisFile + Write-Host "`nFull analysis saved to: $analysisFile" + + # Print stack trace section + Write-Host "`n--- Stack Trace Preview ---" + $stackSection = $analysisOutput -split "STACK_TEXT:" | Select-Object -Last 1 + $stackLines = $stackSection -split "`n" | Select-Object -First 20 + $stackLines | ForEach-Object { Write-Host $_ } + Write-Host "--- End Stack Trace Preview ---" + } + + Write-Host "`n⚠️ Crash dumps detected! Download the 'crash-dumps' artifact for detailed analysis." 
+ + # Copy dumps to workspace for artifact upload + $artifactPath = "$env:GITHUB_WORKSPACE\crash_dumps_collected" + New-Item -ItemType Directory -Force -Path $artifactPath | Out-Null + + foreach ($dump in $dumps) { + $destName = "$($dump.Directory.Name)_$($dump.Name)" + Copy-Item $dump.FullName -Destination "$artifactPath\$destName" + if (Test-Path "$($dump.FullName).analysis.txt") { + Copy-Item "$($dump.FullName).analysis.txt" -Destination "$artifactPath\$destName.analysis.txt" + } + } + + Write-Host "Copied $($dumps.Count) dump(s) to artifact directory" + } else { + Write-Host "No crash dumps from this CI run found" + } + + - name: Upload crash dumps + if: always() + uses: actions/upload-artifact@v4 + with: + name: crash-dumps-${{ github.run_id }}-${{ github.run_attempt }} + path: | + crash_dumps_collected/*.dmp + crash_dumps_collected/*.txt + if-no-files-found: ignore + retention-days: 7 + + - name: Check test results + shell: powershell + working-directory: ${{ inputs.working-directory }} + run: | + # Re-check test results to fail the job if tests failed + if ($LASTEXITCODE -ne 0) { + Write-Host "Tests failed with exit code: $LASTEXITCODE" + exit $LASTEXITCODE + } diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 84907351fe287c93c8ef0e50c66a3ba51e610ce7..f4ba227168fb9cec10e1b5e23223b48e7a4ca222 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -511,8 +511,8 @@ jobs: runs-on: - self-mini-macos if: | - startsWith(github.ref, 'refs/tags/v') - || contains(github.event.pull_request.labels.*.name, 'run-bundling') + ( startsWith(github.ref, 'refs/tags/v') + || contains(github.event.pull_request.labels.*.name, 'run-bundling') ) needs: [macos_tests] env: MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} @@ -526,6 +526,11 @@ jobs: with: node-version: "18" + - name: Setup Sentry CLI + uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2 + with: + token: ${{ SECRETS.SENTRY_AUTH_TOKEN }} + - name: Checkout repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: @@ -599,8 +604,8 @@ jobs: runs-on: - namespace-profile-16x32-ubuntu-2004 # ubuntu 20.04 for minimal glibc if: | - startsWith(github.ref, 'refs/tags/v') - || contains(github.event.pull_request.labels.*.name, 'run-bundling') + ( startsWith(github.ref, 'refs/tags/v') + || contains(github.event.pull_request.labels.*.name, 'run-bundling') ) needs: [linux_tests] steps: - name: Checkout repo @@ -611,6 +616,11 @@ jobs: - name: Install Linux dependencies run: ./script/linux && ./script/install-mold 2.34.0 + - name: Setup Sentry CLI + uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2 + with: + token: ${{ SECRETS.SENTRY_AUTH_TOKEN }} + - name: Determine version and release channel if: startsWith(github.ref, 'refs/tags/v') run: | @@ -650,7 +660,7 @@ jobs: timeout-minutes: 60 name: Linux arm64 release bundle runs-on: - - namespace-profile-32x64-ubuntu-2004-arm # ubuntu 20.04 for minimal glibc + - namespace-profile-8x32-ubuntu-2004-arm-m4 # ubuntu 20.04 for minimal glibc if: | startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') @@ -664,6 +674,11 @@ jobs: - name: Install Linux dependencies run: ./script/linux + - name: Setup Sentry CLI + uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2 + with: + token: ${{ SECRETS.SENTRY_AUTH_TOKEN }} + - name: Determine version and release channel if: startsWith(github.ref, 'refs/tags/v') run: | @@ -703,10 +718,8 @@ jobs: 
timeout-minutes: 60 runs-on: github-8vcpu-ubuntu-2404 if: | - false && ( - startsWith(github.ref, 'refs/tags/v') - || contains(github.event.pull_request.labels.*.name, 'run-bundling') - ) + false && ( startsWith(github.ref, 'refs/tags/v') + || contains(github.event.pull_request.labels.*.name, 'run-bundling') ) needs: [linux_tests] name: Build Zed on FreeBSD steps: @@ -791,6 +804,11 @@ jobs: with: clean: false + - name: Setup Sentry CLI + uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2 + with: + token: ${{ SECRETS.SENTRY_AUTH_TOKEN }} + - name: Determine version and release channel working-directory: ${{ env.ZED_WORKSPACE }} if: ${{ startsWith(github.ref, 'refs/tags/v') }} @@ -833,3 +851,12 @@ jobs: run: gh release edit "$GITHUB_REF_NAME" --draft=false env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Create Sentry release + uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c # v3 + env: + SENTRY_ORG: zed-dev + SENTRY_PROJECT: zed + SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} + with: + environment: production diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index b3500a085b6e52db45993acb7a40c40bb420a847..5d63c34edd28d7e0ab3930132867f40b8f5262e9 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -168,7 +168,7 @@ jobs: name: Create a Linux *.tar.gz bundle for ARM if: github.repository_owner == 'zed-industries' runs-on: - - namespace-profile-32x64-ubuntu-2004-arm # ubuntu 20.04 for minimal glibc + - namespace-profile-8x32-ubuntu-2004-arm-m4 # ubuntu 20.04 for minimal glibc needs: tests steps: - name: Checkout repo @@ -206,9 +206,6 @@ jobs: runs-on: github-8vcpu-ubuntu-2404 needs: tests name: Build Zed on FreeBSD - # env: - # MYTOKEN : ${{ secrets.MYTOKEN }} - # MYTOKEN2: "value2" steps: - uses: actions/checkout@v4 - name: Build FreeBSD remote-server @@ -243,7 +240,6 @@ jobs: bundle-nix: name: Build and cache Nix package - if: false needs: tests secrets: inherit uses: ./.github/workflows/nix.yml @@ -316,3 +312,12 @@ jobs: git config user.email github-actions@github.com git tag -f nightly git push origin nightly --force + + - name: Create Sentry release + uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c # v3 + env: + SENTRY_ORG: zed-dev + SENTRY_PROJECT: zed + SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} + with: + environment: production diff --git a/.github/workflows/unit_evals.yml b/.github/workflows/unit_evals.yml index 2e03fb028f14b7a345e33365b3ffe0ba9dbfd756..c03cf8b087188f3e10a298e52a8278e63765c4f0 100644 --- a/.github/workflows/unit_evals.yml +++ b/.github/workflows/unit_evals.yml @@ -3,7 +3,7 @@ name: Run Unit Evals on: schedule: # GitHub might drop jobs at busy times, so we choose a random time in the middle of the night. 
- - cron: "47 1 * * *" + - cron: "47 1 * * 2" workflow_dispatch: concurrency: diff --git a/Cargo.lock b/Cargo.lock index 0f0e78bb48aac9d8effc06b3ee0aaaddb81581bd..ddeaebd0bf656ec82a698ef66906d314b9da6ef6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6,12 +6,14 @@ version = 4 name = "acp_thread" version = "0.1.0" dependencies = [ + "action_log", "agent-client-protocol", "anyhow", - "assistant_tool", "buffer_diff", + "collections", "editor", "env_logger 0.11.8", + "file_icons", "futures 0.3.31", "gpui", "indoc", @@ -21,15 +23,46 @@ dependencies = [ "markdown", "parking_lot", "project", + "prompt_store", "rand 0.8.5", "serde", "serde_json", "settings", "smol", "tempfile", + "terminal", "ui", + "url", + "util", + "uuid", + "watch", + "workspace-hack", +] + +[[package]] +name = "action_log" +version = "0.1.0" +dependencies = [ + "anyhow", + "buffer_diff", + "clock", + "collections", + "ctor", + "futures 0.3.31", + "gpui", + "indoc", + "language", + "log", + "pretty_assertions", + "project", + "rand 0.8.5", + "serde_json", + "settings", + "text", "util", + "watch", "workspace-hack", + "zlog", ] [[package]] @@ -84,6 +117,7 @@ dependencies = [ name = "agent" version = "0.1.0" dependencies = [ + "action_log", "agent_settings", "anyhow", "assistant_context", @@ -96,7 +130,6 @@ dependencies = [ "component", "context_server", "convert_case 0.8.0", - "feature_flags", "fs", "futures 0.3.31", "git", @@ -138,9 +171,9 @@ dependencies = [ [[package]] name = "agent-client-protocol" -version = "0.0.23" +version = "0.0.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fad72b7b8ee4331b3a4c8d43c107e982a4725564b4ee658ae5c4e79d2b486e8" +checksum = "5f792e009ba59b137ee1db560bc37e567887ad4b5af6f32181d381fff690e2d4" dependencies = [ "anyhow", "futures 0.3.31", @@ -156,27 +189,44 @@ name = "agent2" version = "0.1.0" dependencies = [ "acp_thread", + "action_log", + "agent", "agent-client-protocol", "agent_servers", + "agent_settings", "anyhow", + "assistant_context", "assistant_tool", + "assistant_tools", + "chrono", "client", "clock", "cloud_llm_client", "collections", + "context_server", "ctor", + "db", + "editor", "env_logger 0.11.8", "fs", "futures 0.3.31", + "git", "gpui", "gpui_tokio", "handlebars 4.5.0", + "html_to_markdown", + "http_client", "indoc", "itertools 0.14.0", "language", "language_model", "language_models", "log", + "lsp", + "open", + "parking_lot", + "paths", + "portable-pty", "pretty_assertions", "project", "prompt_store", @@ -187,12 +237,25 @@ dependencies = [ "serde_json", "settings", "smol", + "sqlez", + "task", + "telemetry", + "tempfile", + "terminal", + "text", + "theme", + "tree-sitter-rust", "ui", + "unindent", "util", "uuid", "watch", + "web_search", + "which 6.0.3", "workspace-hack", "worktree", + "zlog", + "zstd", ] [[package]] @@ -200,24 +263,33 @@ name = "agent_servers" version = "0.1.0" dependencies = [ "acp_thread", + "action_log", "agent-client-protocol", + "agent_settings", "agentic-coding-protocol", "anyhow", + "client", "collections", "context_server", "env_logger 0.11.8", + "fs", "futures 0.3.31", "gpui", + "gpui_tokio", "indoc", "itertools 0.14.0", "language", + "language_model", + "language_models", "libc", "log", "nix 0.29.0", "paths", "project", "rand 0.8.5", + "reqwest_client", "schemars", + "semver", "serde", "serde_json", "settings", @@ -257,6 +329,7 @@ name = "agent_ui" version = "0.1.0" dependencies = [ "acp_thread", + "action_log", "agent", "agent-client-protocol", "agent2", @@ -290,7 +363,6 @@ dependencies = [ "gpui", 
"html_to_markdown", "http_client", - "indexed_docs", "indoc", "inventory", "itertools 0.14.0", @@ -338,6 +410,7 @@ dependencies = [ "ui", "ui_input", "unindent", + "url", "urlencoding", "util", "uuid", @@ -814,7 +887,6 @@ dependencies = [ "gpui", "html_to_markdown", "http_client", - "indexed_docs", "language", "pretty_assertions", "project", @@ -838,13 +910,13 @@ dependencies = [ name = "assistant_tool" version = "0.1.0" dependencies = [ + "action_log", "anyhow", "buffer_diff", "clock", "collections", "ctor", "derive_more 0.99.19", - "futures 0.3.31", "gpui", "icons", "indoc", @@ -861,7 +933,6 @@ dependencies = [ "settings", "text", "util", - "watch", "workspace", "workspace-hack", "zlog", @@ -871,6 +942,7 @@ dependencies = [ name = "assistant_tools" version = "0.1.0" dependencies = [ + "action_log", "agent_settings", "anyhow", "assistant_tool", @@ -1204,26 +1276,6 @@ dependencies = [ "syn 2.0.101", ] -[[package]] -name = "async-stripe" -version = "0.40.0" -source = "git+https://github.com/zed-industries/async-stripe?rev=3672dd4efb7181aa597bf580bf5a2f5d23db6735#3672dd4efb7181aa597bf580bf5a2f5d23db6735" -dependencies = [ - "chrono", - "futures-util", - "http-types", - "hyper 0.14.32", - "hyper-rustls 0.24.2", - "serde", - "serde_json", - "serde_path_to_error", - "serde_qs 0.10.1", - "smart-default 0.6.0", - "smol_str 0.1.24", - "thiserror 1.0.69", - "tokio", -] - [[package]] name = "async-tar" version = "0.5.0" @@ -1246,9 +1298,9 @@ checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" [[package]] name = "async-trait" -version = "0.1.88" +version = "0.1.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" dependencies = [ "proc-macro2", "quote", @@ -1327,10 +1379,11 @@ version = "0.1.0" dependencies = [ "anyhow", "collections", - "derive_more 0.99.19", "gpui", - "parking_lot", "rodio", + "schemars", + "serde", + "settings", "util", "workspace-hack", ] @@ -2025,12 +2078,6 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" -[[package]] -name = "base64" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" - [[package]] name = "base64" version = "0.21.7" @@ -3040,6 +3087,7 @@ dependencies = [ "schemars", "serde", "serde_json", + "serde_urlencoded", "settings", "sha2", "smol", @@ -3223,7 +3271,6 @@ dependencies = [ "anyhow", "assistant_context", "assistant_slash_command", - "async-stripe", "async-trait", "async-tungstenite", "audio", @@ -3239,7 +3286,6 @@ dependencies = [ "chrono", "client", "clock", - "cloud_llm_client", "collab_ui", "collections", "command_palette_hooks", @@ -3250,7 +3296,6 @@ dependencies = [ "dap_adapters", "dashmap 6.1.0", "debugger_ui", - "derive_more 0.99.19", "editor", "envy", "extension", @@ -3266,7 +3311,6 @@ dependencies = [ "http_client", "hyper 0.14.32", "indoc", - "jsonwebtoken", "language", "language_model", "livekit_api", @@ -3312,7 +3356,6 @@ dependencies = [ "telemetry_events", "text", "theme", - "thiserror 2.0.12", "time", "tokio", "toml 0.8.20", @@ -3814,7 +3857,7 @@ dependencies = [ "rustc-hash 1.1.0", "rustybuzz 0.14.1", "self_cell", - "smol_str 0.2.2", + "smol_str", "swash", "sys-locale", "ttf-parser 0.21.1", @@ -3836,7 +3879,7 @@ 
dependencies = [ "jni", "js-sys", "libc", - "mach2", + "mach2 0.4.2", "ndk", "ndk-context", "num-derive", @@ -3986,7 +4029,7 @@ checksum = "031ed29858d90cfdf27fe49fae28028a1f20466db97962fa2f4ea34809aeebf3" dependencies = [ "cfg-if", "libc", - "mach2", + "mach2 0.4.2", ] [[package]] @@ -3998,7 +4041,7 @@ dependencies = [ "cfg-if", "crash-context", "libc", - "mach2", + "mach2 0.4.2", "parking_lot", ] @@ -4008,8 +4051,12 @@ version = "0.1.0" dependencies = [ "crash-handler", "log", + "mach2 0.5.0", "minidumper", "paths", + "release_channel", + "serde", + "serde_json", "smol", "workspace-hack", ] @@ -6317,17 +6364,6 @@ dependencies = [ "windows-targets 0.48.5", ] -[[package]] -name = "getrandom" -version = "0.1.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" -dependencies = [ - "cfg-if", - "libc", - "wasi 0.9.0+wasi-snapshot-preview1", -] - [[package]] name = "getrandom" version = "0.2.15" @@ -6392,6 +6428,7 @@ dependencies = [ "log", "parking_lot", "pretty_assertions", + "rand 0.8.5", "regex", "rope", "schemars", @@ -7459,6 +7496,7 @@ dependencies = [ "slotmap", "smallvec", "smol", + "stacksafe", "strum 0.27.1", "sum_tree", "taffy", @@ -7823,6 +7861,12 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "hound" +version = "3.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62adaabb884c94955b19907d60019f4e145d091c75345379e70d1ee696f7854f" + [[package]] name = "html5ever" version = "0.27.0" @@ -7924,27 +7968,6 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "add0ab9360ddbd88cfeb3bd9574a1d85cfdfa14db10b3e21d3700dbc4328758f" -[[package]] -name = "http-types" -version = "2.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e9b187a72d63adbfba487f48095306ac823049cb504ee195541e91c7775f5ad" -dependencies = [ - "anyhow", - "async-channel 1.9.0", - "base64 0.13.1", - "futures-lite 1.13.0", - "http 0.2.12", - "infer", - "pin-project-lite", - "rand 0.7.3", - "serde", - "serde_json", - "serde_qs 0.8.5", - "serde_urlencoded", - "url", -] - [[package]] name = "http_client" version = "0.1.0" @@ -8378,34 +8401,6 @@ version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0263a3d970d5c054ed9312c0057b4f3bde9c0b33836d3637361d4a9e6e7a408" -[[package]] -name = "indexed_docs" -version = "0.1.0" -dependencies = [ - "anyhow", - "async-trait", - "cargo_metadata", - "collections", - "derive_more 0.99.19", - "extension", - "fs", - "futures 0.3.31", - "fuzzy", - "gpui", - "heed", - "html_to_markdown", - "http_client", - "indexmap", - "indoc", - "parking_lot", - "paths", - "pretty_assertions", - "serde", - "strum 0.27.1", - "util", - "workspace-hack", -] - [[package]] name = "indexmap" version = "2.9.0" @@ -8423,12 +8418,6 @@ version = "2.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd" -[[package]] -name = "infer" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64e9829a50b42bb782c1df523f78d332fe371b10c661e78b7a3c34b0198e9fac" - [[package]] name = "inherent" version = "1.0.12" @@ -9633,6 +9622,7 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", + "audio", "collections", "core-foundation 0.10.0", "core-video", @@ -9651,9 +9641,11 @@ dependencies = [ "objc", "parking_lot", "postage", + "rodio", 
"scap", "serde", "serde_json", + "settings", "sha2", "simplelog", "smallvec", @@ -9884,6 +9876,15 @@ dependencies = [ "libc", ] +[[package]] +name = "mach2" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a1b95cd5421ec55b445b5ae102f5ea0e768de1f82bd3001e11f426c269c3aea" +dependencies = [ + "libc", +] + [[package]] name = "malloc_buf" version = "0.0.6" @@ -10204,7 +10205,7 @@ dependencies = [ "num-traits", "range-map", "scroll", - "smart-default 0.7.1", + "smart-default", ] [[package]] @@ -10220,7 +10221,7 @@ dependencies = [ "goblin", "libc", "log", - "mach2", + "mach2 0.4.2", "memmap2", "memoffset", "minidump-common", @@ -11096,14 +11097,13 @@ dependencies = [ "ai_onboarding", "anyhow", "client", - "command_palette_hooks", "component", "db", "documented", "editor", - "feature_flags", "fs", "fuzzy", + "git", "gpui", "itertools 0.14.0", "language", @@ -11115,6 +11115,7 @@ dependencies = [ "schemars", "serde", "settings", + "telemetry", "theme", "ui", "util", @@ -11190,6 +11191,7 @@ dependencies = [ "anyhow", "futures 0.3.31", "http_client", + "log", "schemars", "serde", "serde_json", @@ -13077,19 +13079,6 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" -[[package]] -name = "rand" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" -dependencies = [ - "getrandom 0.1.16", - "libc", - "rand_chacha 0.2.2", - "rand_core 0.5.1", - "rand_hc", -] - [[package]] name = "rand" version = "0.8.5" @@ -13111,16 +13100,6 @@ dependencies = [ "rand_core 0.9.3", ] -[[package]] -name = "rand_chacha" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" -dependencies = [ - "ppv-lite86", - "rand_core 0.5.1", -] - [[package]] name = "rand_chacha" version = "0.3.1" @@ -13141,15 +13120,6 @@ dependencies = [ "rand_core 0.9.3", ] -[[package]] -name = "rand_core" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" -dependencies = [ - "getrandom 0.1.16", -] - [[package]] name = "rand_core" version = "0.6.4" @@ -13168,15 +13138,6 @@ dependencies = [ "getrandom 0.3.2", ] -[[package]] -name = "rand_hc" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" -dependencies = [ - "rand_core 0.5.1", -] - [[package]] name = "range-map" version = "0.2.0" @@ -13518,6 +13479,7 @@ dependencies = [ name = "remote_server" version = "0.1.0" dependencies = [ + "action_log", "anyhow", "askpass", "assistant_tool", @@ -13910,6 +13872,7 @@ checksum = "e40ecf59e742e03336be6a3d53755e789fd05a059fa22dfa0ed624722319e183" dependencies = [ "cpal", "dasp_sample", + "hound", "num-rational", "symphonia", "tracing", @@ -14829,28 +14792,6 @@ dependencies = [ "serde", ] -[[package]] -name = "serde_qs" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7715380eec75f029a4ef7de39a9200e0a63823176b759d055b613f5a87df6a6" -dependencies = [ - "percent-encoding", - "serde", - "thiserror 1.0.69", -] - -[[package]] -name = "serde_qs" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "8cac3f1e2ca2fe333923a1ae72caca910b98ed0630bb35ef6f8c8517d6e81afa" -dependencies = [ - "percent-encoding", - "serde", - "thiserror 1.0.69", -] - [[package]] name = "serde_repr" version = "0.1.20" @@ -14992,8 +14933,10 @@ dependencies = [ "ui", "ui_input", "util", + "vim", "workspace", "workspace-hack", + "zed_actions", ] [[package]] @@ -15225,17 +15168,6 @@ dependencies = [ "serde", ] -[[package]] -name = "smart-default" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "133659a15339456eeeb07572eb02a91c91e9815e9cbc89566944d2c8d3efdbf6" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "smart-default" version = "0.7.1" @@ -15264,15 +15196,6 @@ dependencies = [ "futures-lite 2.6.0", ] -[[package]] -name = "smol_str" -version = "0.1.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fad6c857cbab2627dcf01ec85a623ca4e7dcb5691cbaa3d7fb7653671f0d09c9" -dependencies = [ - "serde", -] - [[package]] name = "smol_str" version = "0.2.2" @@ -15644,6 +15567,40 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" +[[package]] +name = "stacker" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cddb07e32ddb770749da91081d8d0ac3a16f1a569a18b20348cd371f5dead06b" +dependencies = [ + "cc", + "cfg-if", + "libc", + "psm", + "windows-sys 0.59.0", +] + +[[package]] +name = "stacksafe" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d9c1172965d317e87ddb6d364a040d958b40a1db82b6ef97da26253a8b3d090" +dependencies = [ + "stacker", + "stacksafe-macro", +] + +[[package]] +name = "stacksafe-macro" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "172175341049678163e979d9107ca3508046d4d2a7c6682bee46ac541b17db69" +dependencies = [ + "proc-macro-error2", + "quote", + "syn 2.0.101", +] + [[package]] name = "static_assertions" version = "1.1.0" @@ -17974,6 +17931,7 @@ dependencies = [ "command_palette_hooks", "db", "editor", + "env_logger 0.11.8", "futures 0.3.31", "git_ui", "gpui", @@ -18120,12 +18078,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "wasi" -version = "0.9.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" - [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -18359,7 +18311,7 @@ dependencies = [ "indexmap", "libc", "log", - "mach2", + "mach2 0.4.2", "memfd", "object", "once_cell", @@ -18827,33 +18779,6 @@ version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53a85b86a771b1c87058196170769dd264f66c0782acf1ae6cc51bfd64b39082" -[[package]] -name = "welcome" -version = "0.1.0" -dependencies = [ - "anyhow", - "client", - "component", - "db", - "documented", - "editor", - "fuzzy", - "gpui", - "install_cli", - "language", - "picker", - "project", - "serde", - "settings", - "telemetry", - "ui", - "util", - "vim_mode_setting", - "workspace", - "workspace-hack", - "zed_actions", -] - [[package]] name = "which" version = "4.4.2" @@ -20239,7 +20164,7 @@ dependencies = [ [[package]] name = "xim" version = "0.4.0" -source = "git+https://github.com/XDeme1/xim-rs?rev=d50d461764c2213655cd9cf65a0ea94c70d3c4fd#d50d461764c2213655cd9cf65a0ea94c70d3c4fd" +source = 
"git+https://github.com/zed-industries/xim-rs?rev=c0a70c1bd2ce197364216e5e818a2cb3adb99a8d#c0a70c1bd2ce197364216e5e818a2cb3adb99a8d" dependencies = [ "ahash 0.8.11", "hashbrown 0.14.5", @@ -20252,7 +20177,7 @@ dependencies = [ [[package]] name = "xim-ctext" version = "0.3.0" -source = "git+https://github.com/XDeme1/xim-rs?rev=d50d461764c2213655cd9cf65a0ea94c70d3c4fd#d50d461764c2213655cd9cf65a0ea94c70d3c4fd" +source = "git+https://github.com/zed-industries/xim-rs?rev=c0a70c1bd2ce197364216e5e818a2cb3adb99a8d#c0a70c1bd2ce197364216e5e818a2cb3adb99a8d" dependencies = [ "encoding_rs", ] @@ -20260,7 +20185,7 @@ dependencies = [ [[package]] name = "xim-parser" version = "0.2.1" -source = "git+https://github.com/XDeme1/xim-rs?rev=d50d461764c2213655cd9cf65a0ea94c70d3c4fd#d50d461764c2213655cd9cf65a0ea94c70d3c4fd" +source = "git+https://github.com/zed-industries/xim-rs?rev=c0a70c1bd2ce197364216e5e818a2cb3adb99a8d#c0a70c1bd2ce197364216e5e818a2cb3adb99a8d" dependencies = [ "bitflags 2.9.0", ] @@ -20336,8 +20261,9 @@ checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" [[package]] name = "yawc" -version = "0.2.4" -source = "git+https://github.com/deviant-forks/yawc?rev=1899688f3e69ace4545aceb97b2a13881cf26142#1899688f3e69ace4545aceb97b2a13881cf26142" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19a5d82922135b4ae73a079a4ffb5501e9aadb4d785b8c660eaa0a8b899028c5" dependencies = [ "base64 0.22.1", "bytes 1.10.1", @@ -20468,7 +20394,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.200.0" +version = "0.202.0" dependencies = [ "activity_indicator", "agent", @@ -20538,6 +20464,7 @@ dependencies = [ "language_tools", "languages", "libc", + "livekit_client", "log", "markdown", "markdown_preview", @@ -20608,7 +20535,6 @@ dependencies = [ "watch", "web_search", "web_search_providers", - "welcome", "windows 0.61.1", "winresource", "workspace", @@ -20630,13 +20556,6 @@ dependencies = [ "workspace-hack", ] -[[package]] -name = "zed_emmet" -version = "0.0.4" -dependencies = [ - "zed_extension_api 0.1.0", -] - [[package]] name = "zed_extension_api" version = "0.1.0" @@ -20871,6 +20790,7 @@ dependencies = [ "menu", "postage", "project", + "rand 0.8.5", "regex", "release_channel", "reqwest_client", diff --git a/Cargo.toml b/Cargo.toml index d547110bb4a984e868965da4d281dc394dd2fb33..b13795e1e191e45e01b45a79287c56eaf397d9fb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,6 +2,7 @@ resolver = "2" members = [ "crates/acp_thread", + "crates/action_log", "crates/activity_indicator", "crates/agent", "crates/agent2", @@ -80,7 +81,6 @@ members = [ "crates/http_client_tls", "crates/icons", "crates/image_viewer", - "crates/indexed_docs", "crates/edit_prediction", "crates/edit_prediction_button", "crates/inspector_ui", @@ -184,7 +184,6 @@ members = [ "crates/watch", "crates/web_search", "crates/web_search_providers", - "crates/welcome", "crates/workspace", "crates/worktree", "crates/x_ai", @@ -199,7 +198,6 @@ members = [ # Extensions # - "extensions/emmet", "extensions/glsl", "extensions/html", "extensions/proto", @@ -229,6 +227,7 @@ edition = "2024" # acp_thread = { path = "crates/acp_thread" } +action_log = { path = "crates/action_log" } agent = { path = "crates/agent" } agent2 = { path = "crates/agent2" } activity_indicator = { path = "crates/activity_indicator" } @@ -305,7 +304,6 @@ http_client = { path = "crates/http_client" } http_client_tls = { path = "crates/http_client_tls" } icons = { path = "crates/icons" } image_viewer = { path = 
"crates/image_viewer" } -indexed_docs = { path = "crates/indexed_docs" } edit_prediction = { path = "crates/edit_prediction" } edit_prediction_button = { path = "crates/edit_prediction_button" } inspector_ui = { path = "crates/inspector_ui" } @@ -362,6 +360,7 @@ remote_server = { path = "crates/remote_server" } repl = { path = "crates/repl" } reqwest_client = { path = "crates/reqwest_client" } rich_text = { path = "crates/rich_text" } +rodio = { version = "0.21.1", default-features = false } rope = { path = "crates/rope" } rpc = { path = "crates/rpc" } rules_library = { path = "crates/rules_library" } @@ -410,7 +409,6 @@ vim_mode_setting = { path = "crates/vim_mode_setting" } watch = { path = "crates/watch" } web_search = { path = "crates/web_search" } web_search_providers = { path = "crates/web_search_providers" } -welcome = { path = "crates/welcome" } workspace = { path = "crates/workspace" } worktree = { path = "crates/worktree" } x_ai = { path = "crates/x_ai" } @@ -425,7 +423,7 @@ zlog_settings = { path = "crates/zlog_settings" } # agentic-coding-protocol = "0.0.10" -agent-client-protocol = { version = "0.0.23" } +agent-client-protocol = "0.0.30" aho-corasick = "1.1" alacritty_terminal = { git = "https://github.com/zed-industries/alacritty.git", branch = "add-hush-login-flag" } any_vec = "0.14" @@ -517,6 +515,7 @@ libsqlite3-sys = { version = "0.30.1", features = ["bundled"] } linkify = "0.10.0" log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] } lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "39f629bdd03d59abd786ed9fc27e8bca02c0c0ec" } +mach2 = "0.5" markup5ever_rcdom = "0.3.0" metal = "0.29" minidumper = "0.8" @@ -584,6 +583,7 @@ serde_json_lenient = { version = "0.2", features = [ "raw_value", ] } serde_repr = "0.1" +serde_urlencoded = "0.7" sha2 = "0.10" shellexpand = "2.1.0" shlex = "1.3.0" @@ -591,6 +591,7 @@ simplelog = "0.12.2" smallvec = { version = "1.6", features = ["union"] } smol = "2.0" sqlformat = "0.2" +stacksafe = "0.1" streaming-iterator = "0.1" strsim = "0.11" strum = { version = "0.27.0", features = ["derive"] } @@ -661,25 +662,9 @@ which = "6.0.0" windows-core = "0.61" wit-component = "0.221" workspace-hack = "0.1.0" -# We can switch back to the published version once https://github.com/infinitefield/yawc/pull/16 is merged and a new -# version is released. -yawc = { git = "https://github.com/deviant-forks/yawc", rev = "1899688f3e69ace4545aceb97b2a13881cf26142" } +yawc = "0.2.5" zstd = "0.11" -[workspace.dependencies.async-stripe] -git = "https://github.com/zed-industries/async-stripe" -rev = "3672dd4efb7181aa597bf580bf5a2f5d23db6735" -default-features = false -features = [ - "runtime-tokio-hyper-rustls", - "billing", - "checkout", - "events", - # The features below are only enabled to get the `events` feature to build. - "chrono", - "connect", -] - [workspace.dependencies.windows] version = "0.61" features = [ @@ -712,6 +697,7 @@ features = [ "Win32_System_LibraryLoader", "Win32_System_Memory", "Win32_System_Ole", + "Win32_System_Performance", "Win32_System_Pipes", "Win32_System_SystemInformation", "Win32_System_SystemServices", @@ -816,38 +802,33 @@ unexpected_cfgs = { level = "allow" } dbg_macro = "deny" todo = "deny" -# Motivation: We use `vec![a..b]` a lot when dealing with ranges in text, so -# warning on this rule produces a lot of noise. -single_range_in_vec_init = "allow" +# This is not a style lint, see https://github.com/rust-lang/rust-clippy/pull/15454 +# Remove when the lint gets promoted to `suspicious`. 
+declare_interior_mutable_const = "deny" -# These are all of the rules that currently have violations in the Zed -# codebase. +redundant_clone = "deny" + +# We currently do not restrict any style rules +# as it slows down shipping code to Zed. # -# We'll want to drive this list down by either: -# 1. fixing violations of the rule and begin enforcing it -# 2. deciding we want to allow the rule permanently, at which point -# we should codify that separately above. +# Running ./script/clippy can take several minutes, and so it's +# common to skip that step and let CI do it. Any unexpected failures +# (which also take minutes to discover) thus require switching back +# to an old branch, manual fixing, and re-pushing. # -# This list shouldn't be added to; it should only get shorter. -# ============================================================================= - -# There are a bunch of rules currently failing in the `style` group, so -# allow all of those, for now. +# In the future we could improve this by either making sure +# Zed can surface clippy errors in diagnostics (in addition to the +# rust-analyzer errors), or by having CI fix style nits automatically. style = { level = "allow", priority = -1 } -# Temporary list of style lints that we've fixed so far. -module_inception = { level = "deny" } -question_mark = { level = "deny" } -redundant_closure = { level = "deny" } # Individual rules that have violations in the codebase: type_complexity = "allow" -# We often return trait objects from `new` functions. -new_ret_no_self = { level = "allow" } -# We have a few `next` functions that differ in lifetimes -# compared to Iterator::next. Yet, clippy complains about those. -should_implement_trait = { level = "allow" } let_underscore_future = "allow" +# Motivation: We use `vec![a..b]` a lot when dealing with ranges in text, so +# warning on this rule produces a lot of noise. +single_range_in_vec_init = "allow" + # in Rust it can be very tedious to reduce argument count without # running afoul of the borrow checker. too_many_arguments = "allow" diff --git a/Dockerfile-collab b/Dockerfile-collab index 2dafe296c7c8bb46c758d6c5f67ce6feed055d2b..c1621d6ee67e42117315ea49eac99f6f6260f4b7 100644 --- a/Dockerfile-collab +++ b/Dockerfile-collab @@ -1,6 +1,6 @@ # syntax = docker/dockerfile:1.2 -FROM rust:1.88-bookworm as builder +FROM rust:1.89-bookworm as builder WORKDIR app COPY . . 
diff --git a/assets/fonts/ibm-plex-sans/IBMPlexSans-Bold.ttf b/assets/fonts/ibm-plex-sans/IBMPlexSans-Bold.ttf new file mode 100644 index 0000000000000000000000000000000000000000..1d66b1a2e96d5e33d6f5fa8e81e76b4e5621d240 Binary files /dev/null and b/assets/fonts/ibm-plex-sans/IBMPlexSans-Bold.ttf differ diff --git a/assets/fonts/ibm-plex-sans/IBMPlexSans-BoldItalic.ttf b/assets/fonts/ibm-plex-sans/IBMPlexSans-BoldItalic.ttf new file mode 100644 index 0000000000000000000000000000000000000000..e07bc1f527dd77ba8fd967ef00a54371da7bfaf0 Binary files /dev/null and b/assets/fonts/ibm-plex-sans/IBMPlexSans-BoldItalic.ttf differ diff --git a/assets/fonts/ibm-plex-sans/IBMPlexSans-Italic.ttf b/assets/fonts/ibm-plex-sans/IBMPlexSans-Italic.ttf new file mode 100644 index 0000000000000000000000000000000000000000..efe8a1fb9de848891cd6e7d70280e1f47019c0ca Binary files /dev/null and b/assets/fonts/ibm-plex-sans/IBMPlexSans-Italic.ttf differ diff --git a/assets/fonts/ibm-plex-sans/IBMPlexSans-Regular.ttf b/assets/fonts/ibm-plex-sans/IBMPlexSans-Regular.ttf new file mode 100644 index 0000000000000000000000000000000000000000..bd6817d5202895da5ac4fad88de3da71e652881a Binary files /dev/null and b/assets/fonts/ibm-plex-sans/IBMPlexSans-Regular.ttf differ diff --git a/assets/fonts/plex-mono/license.txt b/assets/fonts/ibm-plex-sans/license.txt similarity index 100% rename from assets/fonts/plex-mono/license.txt rename to assets/fonts/ibm-plex-sans/license.txt diff --git a/assets/fonts/lilex/Lilex-Bold.ttf b/assets/fonts/lilex/Lilex-Bold.ttf new file mode 100644 index 0000000000000000000000000000000000000000..45930ee30b81a78e964c3ff494e05ea45147ed9b Binary files /dev/null and b/assets/fonts/lilex/Lilex-Bold.ttf differ diff --git a/assets/fonts/lilex/Lilex-BoldItalic.ttf b/assets/fonts/lilex/Lilex-BoldItalic.ttf new file mode 100644 index 0000000000000000000000000000000000000000..10c6ab5f74ab2192f0258ba637c05de7d54b632f Binary files /dev/null and b/assets/fonts/lilex/Lilex-BoldItalic.ttf differ diff --git a/assets/fonts/lilex/Lilex-Italic.ttf b/assets/fonts/lilex/Lilex-Italic.ttf new file mode 100644 index 0000000000000000000000000000000000000000..e7aef10f7e7c6803199551463d0ab456d9c9e03a Binary files /dev/null and b/assets/fonts/lilex/Lilex-Italic.ttf differ diff --git a/assets/fonts/lilex/Lilex-Regular.ttf b/assets/fonts/lilex/Lilex-Regular.ttf new file mode 100644 index 0000000000000000000000000000000000000000..cb98a69b0ffb1976805bf5af3047307fe2422b1d Binary files /dev/null and b/assets/fonts/lilex/Lilex-Regular.ttf differ diff --git a/assets/fonts/plex-sans/license.txt b/assets/fonts/lilex/OFL.txt similarity index 96% rename from assets/fonts/plex-sans/license.txt rename to assets/fonts/lilex/OFL.txt index f72f76504cd73cf2c00b140b2743c138320597b6..156240bc907aa14571afb18b4b1bde62d3ea33e0 100644 --- a/assets/fonts/plex-sans/license.txt +++ b/assets/fonts/lilex/OFL.txt @@ -1,8 +1,9 @@ -Copyright © 2017 IBM Corp. with Reserved Font Name "Plex" +Copyright 2019 The Lilex Project Authors (https://github.com/mishamyrt/Lilex) This Font Software is licensed under the SIL Open Font License, Version 1.1. 
This license is copied below, and is also available with a FAQ at: -http://scripts.sil.org/OFL +https://scripts.sil.org/OFL + ----------------------------------------------------------- SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 @@ -89,4 +90,4 @@ COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM -OTHER DEALINGS IN THE FONT SOFTWARE. \ No newline at end of file +OTHER DEALINGS IN THE FONT SOFTWARE. diff --git a/assets/fonts/plex-mono/ZedPlexMono-Bold.ttf b/assets/fonts/plex-mono/ZedPlexMono-Bold.ttf deleted file mode 100644 index d5f4b5e2855fe9e581155d864835570d2e5a06bf..0000000000000000000000000000000000000000 Binary files a/assets/fonts/plex-mono/ZedPlexMono-Bold.ttf and /dev/null differ diff --git a/assets/fonts/plex-mono/ZedPlexMono-BoldItalic.ttf b/assets/fonts/plex-mono/ZedPlexMono-BoldItalic.ttf deleted file mode 100644 index 05eaf7cccde1c6f79558395d4cc06ed685342a60..0000000000000000000000000000000000000000 Binary files a/assets/fonts/plex-mono/ZedPlexMono-BoldItalic.ttf and /dev/null differ diff --git a/assets/fonts/plex-mono/ZedPlexMono-Italic.ttf b/assets/fonts/plex-mono/ZedPlexMono-Italic.ttf deleted file mode 100644 index 3b078217578da8d88ddfd158133a11ed7f1780ee..0000000000000000000000000000000000000000 Binary files a/assets/fonts/plex-mono/ZedPlexMono-Italic.ttf and /dev/null differ diff --git a/assets/fonts/plex-mono/ZedPlexMono-Regular.ttf b/assets/fonts/plex-mono/ZedPlexMono-Regular.ttf deleted file mode 100644 index 61dbb583619e200262c29589ef037538b2360d78..0000000000000000000000000000000000000000 Binary files a/assets/fonts/plex-mono/ZedPlexMono-Regular.ttf and /dev/null differ diff --git a/assets/fonts/plex-sans/ZedPlexSans-Bold.ttf b/assets/fonts/plex-sans/ZedPlexSans-Bold.ttf deleted file mode 100644 index f1e66392f7c5a329fbaa14ae9f6d3ee0d48960cd..0000000000000000000000000000000000000000 Binary files a/assets/fonts/plex-sans/ZedPlexSans-Bold.ttf and /dev/null differ diff --git a/assets/fonts/plex-sans/ZedPlexSans-BoldItalic.ttf b/assets/fonts/plex-sans/ZedPlexSans-BoldItalic.ttf deleted file mode 100644 index 7612dc516742f8255ce432cfee67f8225ed6ae62..0000000000000000000000000000000000000000 Binary files a/assets/fonts/plex-sans/ZedPlexSans-BoldItalic.ttf and /dev/null differ diff --git a/assets/fonts/plex-sans/ZedPlexSans-Italic.ttf b/assets/fonts/plex-sans/ZedPlexSans-Italic.ttf deleted file mode 100644 index 8769c232ee69236158ed03a1306ff7f2be4ebdd8..0000000000000000000000000000000000000000 Binary files a/assets/fonts/plex-sans/ZedPlexSans-Italic.ttf and /dev/null differ diff --git a/assets/fonts/plex-sans/ZedPlexSans-Regular.ttf b/assets/fonts/plex-sans/ZedPlexSans-Regular.ttf deleted file mode 100644 index 3ea293d59a31d2d80f0a1b35ef2c4507e7e4eec2..0000000000000000000000000000000000000000 Binary files a/assets/fonts/plex-sans/ZedPlexSans-Regular.ttf and /dev/null differ diff --git a/assets/icons/ai.svg b/assets/icons/ai.svg index d60396ad47db1a2068207c52f783c08cd2da4e69..4236d50337bef92cb550cdbf71d83843ab35e2f3 100644 --- a/assets/icons/ai.svg +++ b/assets/icons/ai.svg @@ -1,5 +1,5 @@ - + diff --git a/assets/icons/arrow_circle.svg b/assets/icons/arrow_circle.svg index 90e352bdea7a208356139bed8af5bb3c1301b5ce..cdfa93979505e45a9e876059eddf5a61ac489e1a 100644 --- a/assets/icons/arrow_circle.svg +++ b/assets/icons/arrow_circle.svg @@ -1,6 
+1,6 @@ - - - - + + + + diff --git a/assets/icons/arrow_down.svg b/assets/icons/arrow_down.svg index 7d78497e6d28f3088f035762c39e38ea9b0e9f7b..60e6584c4568a5e113e225800024e835ea9743e7 100644 --- a/assets/icons/arrow_down.svg +++ b/assets/icons/arrow_down.svg @@ -1,3 +1,3 @@ - + diff --git a/assets/icons/arrow_down10.svg b/assets/icons/arrow_down10.svg index 97ce967a8b03311dfe9df75da6ee4b26e44ba72a..5933b758d939bef502495cbffcbddc60a3d42691 100644 --- a/assets/icons/arrow_down10.svg +++ b/assets/icons/arrow_down10.svg @@ -1 +1 @@ - + diff --git a/assets/icons/arrow_down_from_line.svg b/assets/icons/arrow_down_from_line.svg deleted file mode 100644 index 89316973a00ab03d48c14dac2faef9f09c93986e..0000000000000000000000000000000000000000 --- a/assets/icons/arrow_down_from_line.svg +++ /dev/null @@ -1 +0,0 @@ - diff --git a/assets/icons/arrow_down_right.svg b/assets/icons/arrow_down_right.svg index b9c10263d04cfbbe8d6c621081fbf2dea146a3b0..ebdb06d77b24d5aa0d28615e156135495e8e80c4 100644 --- a/assets/icons/arrow_down_right.svg +++ b/assets/icons/arrow_down_right.svg @@ -1 +1,4 @@ - + + + + diff --git a/assets/icons/arrow_left.svg b/assets/icons/arrow_left.svg index 57ee7504906134de8662732207eeb261fc6359f2..f7eacb2a779c94e3f743fdbc594773de73017e41 100644 --- a/assets/icons/arrow_left.svg +++ b/assets/icons/arrow_left.svg @@ -1,3 +1,3 @@ - + diff --git a/assets/icons/arrow_right.svg b/assets/icons/arrow_right.svg index 7a5b1174eb5a98250be404e052f3e197d95756b8..b9324af5a289ac2b4ae6d7b6374d603587763de0 100644 --- a/assets/icons/arrow_right.svg +++ b/assets/icons/arrow_right.svg @@ -1,3 +1,3 @@ - + diff --git a/assets/icons/arrow_right_left.svg b/assets/icons/arrow_right_left.svg index 30331960c9c83c85db68c9df07447f40ba4b7eeb..2c1211056a17eee8644b07b1fb651818f63db3dc 100644 --- a/assets/icons/arrow_right_left.svg +++ b/assets/icons/arrow_right_left.svg @@ -1 +1,6 @@ - + + + + + + diff --git a/assets/icons/arrow_up.svg b/assets/icons/arrow_up.svg index 81dfee8042609ff2a2a8b26026cddb43557b2be2..ff3ad441234b8d2ae1aeb17c531a9ecb288dc8d2 100644 --- a/assets/icons/arrow_up.svg +++ b/assets/icons/arrow_up.svg @@ -1,3 +1,3 @@ - + diff --git a/assets/icons/arrow_up_alt.svg b/assets/icons/arrow_up_alt.svg deleted file mode 100644 index c8cf286a8c0e2d60f6534f13e61bf8a3e8dbe898..0000000000000000000000000000000000000000 --- a/assets/icons/arrow_up_alt.svg +++ /dev/null @@ -1,3 +0,0 @@ - - - diff --git a/assets/icons/arrow_up_from_line.svg b/assets/icons/arrow_up_from_line.svg deleted file mode 100644 index 50a075e42bd4e6cf515bcff51d3b758bea72f1c5..0000000000000000000000000000000000000000 --- a/assets/icons/arrow_up_from_line.svg +++ /dev/null @@ -1 +0,0 @@ - diff --git a/assets/icons/arrow_up_right.svg b/assets/icons/arrow_up_right.svg index 9fbafba4ec9329049f03845ef80644a807ff981b..a948ef8f8130b99339130e4400320309ae3afaec 100644 --- a/assets/icons/arrow_up_right.svg +++ b/assets/icons/arrow_up_right.svg @@ -1 +1,4 @@ - + + + + diff --git a/assets/icons/arrow_up_right_alt.svg b/assets/icons/arrow_up_right_alt.svg deleted file mode 100644 index 4e923c6867ac1ac396d2180827af390941c291a4..0000000000000000000000000000000000000000 --- a/assets/icons/arrow_up_right_alt.svg +++ /dev/null @@ -1,3 +0,0 @@ - - - diff --git a/assets/icons/audio_off.svg b/assets/icons/audio_off.svg index dfb5a1c45829119ea0dc89bbca3a3f33228ee88f..43d2a04344748feab9496cd528aacba075d9f7e8 100644 --- a/assets/icons/audio_off.svg +++ b/assets/icons/audio_off.svg @@ -1,7 +1,7 @@ - - - - - + + + + + diff --git a/assets/icons/audio_on.svg 
[assets/icons: bulk icon SVG changes — the SVG markup itself is not reproduced in these hunks; the file-level summary below lists every icon added, deleted, or modified.]

Added:
  assets/icons/file_markdown.svg, file_text_filled.svg, file_text_outlined.svg,
  folder_search.svg, git_branch_alt.svg, json.svg, menu_alt_temp.svg,
  notepad.svg, reader.svg

Deleted:
  assets/icons/bug_off.svg, caret_down.svg, caret_up.svg, chevron_down_small.svg,
  circle_off.svg, cloud.svg, context.svg, debug_restart.svg, debug_stop.svg,
  delete.svg, document_text.svg, equal.svg, external_link.svg, file_create.svg,
  file_search.svg, file_text.svg, folder_x.svg, function.svg, git_branch_small.svg,
  globe.svg, hammer.svg, inlay_hint.svg, layout.svg, light_bulb.svg, link.svg,
  logo_96.svg, lsp_debug.svg, lsp_restart.svg, lsp_stop.svg, mail_open.svg,
  panel_left.svg, panel_right.svg, person_circle.svg, phone_incoming.svg,
  pocket_knife.svg, route.svg, save.svg, scroll_text.svg, search_selection.svg,
  settings_alt.svg, slash_square.svg, sliders_alt.svg, sliders_vertical.svg,
  snip.svg, sparkle_alt.svg, sparkle_filled.svg, speaker_loud.svg, stop_filled.svg

Modified (assets/icons/):
  audio_on.svg, backspace.svg, bell.svg, bell_dot.svg, bell_off.svg, bell_ring.svg,
  binary.svg, blocks.svg, bolt_outlined.svg, book.svg, book_copy.svg,
  case_sensitive.svg, chat.svg, check.svg, check_circle.svg, check_double.svg,
  chevron_down.svg, chevron_left.svg, chevron_right.svg, chevron_up.svg,
  chevron_up_down.svg, circle.svg, circle_check.svg, circle_help.svg, close.svg,
  cloud_download.svg, code.svg, cog.svg, command.svg, control.svg, copilot.svg,
  copilot_disabled.svg, copilot_error.svg, copilot_init.svg, copy.svg,
  countdown_timer.svg, crosshair.svg, cursor_i_beam.svg, dash.svg,
  database_zap.svg, debug.svg, debug_breakpoint.svg, debug_continue.svg,
  debug_detach.svg, debug_disabled_breakpoint.svg, debug_disabled_log_breakpoint.svg,
  debug_ignore_breakpoints.svg, debug_log_breakpoint.svg, debug_pause.svg,
  debug_step_back.svg, debug_step_into.svg, debug_step_out.svg, debug_step_over.svg,
  diff.svg, disconnected.svg, download.svg, ellipsis.svg, ellipsis_vertical.svg,
  envelope.svg, eraser.svg, escape.svg, exit.svg, expand_down.svg, expand_up.svg,
  expand_vertical.svg, eye.svg, file.svg, file_code.svg, file_diff.svg,
  file_doc.svg, file_generic.svg, file_git.svg, file_lock.svg, file_rust.svg,
  file_toml.svg, file_tree.svg, filter.svg, flame.svg, folder.svg, folder_open.svg,
  font.svg, font_size.svg, font_weight.svg, forward_arrow.svg,
  generic_maximize.svg, generic_restore.svg, git_branch.svg, github.svg, hash.svg,
  history_rerun.svg, image.svg, info.svg, keyboard.svg, knockouts/x_fg.svg,
  library.svg, line_height.svg, list_collapse.svg, list_todo.svg, list_tree.svg,
  list_x.svg, load_circle.svg, location_edit.svg, lock_outlined.svg,
  magnifying_glass.svg, maximize.svg, menu.svg, menu_alt.svg, mic.svg,
  mic_mute.svg, minimize.svg, option.svg, pencil.svg, person.svg, pin.svg,
  play_filled.svg, play_outlined.svg, plus.svg, power.svg, public.svg,
  pull_request.svg, quote.svg, refresh_title.svg, regex.svg, repl_neutral.svg,
  repl_off.svg, repl_pause.svg, repl_play.svg, replace.svg, replace_next.svg,
  rerun.svg, return.svg, rotate_ccw.svg, rotate_cw.svg, scissors.svg, screen.svg,
  select_all.svg, send.svg, server.svg, settings.svg, shield_check.svg, shift.svg,
  slash.svg, sliders.svg, space.svg, sparkle.svg, split.svg, split_alt.svg,
  square_dot.svg, square_minus.svg, square_plus.svg, star.svg, star_filled.svg,
  stop.svg, supermaven.svg, supermaven_disabled.svg, supermaven_error.svg,
  supermaven_init.svg, swatch_book.svg, tab.svg, terminal_alt.svg,
  text_snippet.svg, text_thread.svg, thread.svg, thread_from_summary.svg,
  thumbs_down.svg, thumbs_up.svg, todo_complete.svg

Modified (assets/icons/file_icons/):
  ai.svg, audio.svg, book.svg, bun.svg, chevron_down.svg, chevron_left.svg,
  chevron_right.svg, chevron_up.svg, code.svg, coffeescript.svg,
  conversations.svg, dart.svg, database.svg, diff.svg, eslint.svg, file.svg,
  folder.svg, folder_open.svg, font.svg, git.svg, gleam.svg, graphql.svg,
  hash.svg, heroku.svg, html.svg, image.svg, java.svg, lock.svg,
  magnifying_glass.svg, nix.svg, notebook.svg, package.svg, phoenix.svg,
  plus.svg, prettier.svg, project.svg, python.svg, replace.svg, replace_next.svg,
  rust.svg, scala.svg, settings.svg, tcl.svg, toml.svg, video.svg, vue.svg
+ diff --git a/assets/icons/todo_pending.svg b/assets/icons/todo_pending.svg index dfb013b52b987a3f99e1b8304418b847ff1ccf2b..e5e9776f11b2ebdaed8ab42039d1a8de80f29ccb 100644 --- a/assets/icons/todo_pending.svg +++ b/assets/icons/todo_pending.svg @@ -1,10 +1,10 @@ - - - - - - - - + + + + + + + + diff --git a/assets/icons/todo_progress.svg b/assets/icons/todo_progress.svg index 9b2ed7375d9807139261a2d81f7f1f168470d0f4..b4a3e8c50e75343435849323d49d3c45cfe3069c 100644 --- a/assets/icons/todo_progress.svg +++ b/assets/icons/todo_progress.svg @@ -1,11 +1,11 @@ - - - - - - - - - + + + + + + + + + diff --git a/assets/icons/tool_copy.svg b/assets/icons/tool_copy.svg index e722d8a022fca603b87fc1859436fcc060355095..a497a5c9cba29861710e3810029f2936aa2f02b1 100644 --- a/assets/icons/tool_copy.svg +++ b/assets/icons/tool_copy.svg @@ -1,4 +1,4 @@ - - + + diff --git a/assets/icons/tool_delete_file.svg b/assets/icons/tool_delete_file.svg index 3276f3d78e8ca1bb6d79a58845577cb150f545aa..e15c0cb568eae4274a9621ac403aa3393b1d5287 100644 --- a/assets/icons/tool_delete_file.svg +++ b/assets/icons/tool_delete_file.svg @@ -1,5 +1,5 @@ - - - + + + diff --git a/assets/icons/tool_diagnostics.svg b/assets/icons/tool_diagnostics.svg index c659d967812727450bc3efb825b6492e6d2eda50..414810628d96cbb6fa662e359d0dec3581afa022 100644 --- a/assets/icons/tool_diagnostics.svg +++ b/assets/icons/tool_diagnostics.svg @@ -1,5 +1,5 @@ - - - + + + diff --git a/assets/icons/tool_folder.svg b/assets/icons/tool_folder.svg index 9d3ac299d2b6c9696b6aa28c2148c9d4af9f819d..35f4c1f8acf6796b14c2fb575e2181b771253706 100644 --- a/assets/icons/tool_folder.svg +++ b/assets/icons/tool_folder.svg @@ -1,3 +1,3 @@ - + diff --git a/assets/icons/tool_hammer.svg b/assets/icons/tool_hammer.svg index e66173ce70f39416bbfdbfdb97dfa6f99e1ef3b7..f725012cdf211fcdd0f739e2693caac4da300b49 100644 --- a/assets/icons/tool_hammer.svg +++ b/assets/icons/tool_hammer.svg @@ -1,5 +1,5 @@ - - - + + + diff --git a/assets/icons/tool_notification.svg b/assets/icons/tool_notification.svg index 7510b3204000d714e8fb120179cbfc521e1abdd8..7903a3369a5a620fdbefc3a7f1cfe72ee0b98c6f 100644 --- a/assets/icons/tool_notification.svg +++ b/assets/icons/tool_notification.svg @@ -1,4 +1,4 @@ - - + + diff --git a/assets/icons/tool_pencil.svg b/assets/icons/tool_pencil.svg index b913015c08ae5e7fc2ceb6011bd89925cedc27fe..c4d289e9c06c7fdc6d4e1875f4170c87ef6ea425 100644 --- a/assets/icons/tool_pencil.svg +++ b/assets/icons/tool_pencil.svg @@ -1,4 +1,4 @@ - - + + diff --git a/assets/icons/tool_read.svg b/assets/icons/tool_read.svg index 458cbb36607a308ae4ce5e6a98006f9ff87461e8..d22e9d8c7da9ba04fe194339d787e40637cf5257 100644 --- a/assets/icons/tool_read.svg +++ b/assets/icons/tool_read.svg @@ -1,7 +1,7 @@ - - - - - + + + + + diff --git a/assets/icons/tool_regex.svg b/assets/icons/tool_regex.svg index 0432cd570fe2341829c40f5d4e629e3b27e24379..818c2ba360bc5aca3d4a7bf8ab65a03a2efe235e 100644 --- a/assets/icons/tool_regex.svg +++ b/assets/icons/tool_regex.svg @@ -1,4 +1,4 @@ - + diff --git a/assets/icons/tool_search.svg b/assets/icons/tool_search.svg index 4f2750cfa2624ff4419c159fda5b62a515b43113..b225a1298eeb627c420cf19c801dded83fffb097 100644 --- a/assets/icons/tool_search.svg +++ b/assets/icons/tool_search.svg @@ -1,4 +1,4 @@ - - + + diff --git a/assets/icons/tool_terminal.svg b/assets/icons/tool_terminal.svg index 5154fa8e700ce8169cff3ae278708250030fc506..24da5e3a10bc47c8a7c73173c21a1ec2c6cf21b6 100644 --- a/assets/icons/tool_terminal.svg +++ b/assets/icons/tool_terminal.svg @@ -1,5 +1,5 @@ - - - 
+ + + diff --git a/assets/icons/tool_think.svg b/assets/icons/tool_think.svg index 54d5ac5fd78f548eb8d9406df624378e08a497e2..efd5908a907b21c573ebc69fc13f5a210ab5d848 100644 --- a/assets/icons/tool_think.svg +++ b/assets/icons/tool_think.svg @@ -1,3 +1,3 @@ - + diff --git a/assets/icons/tool_web.svg b/assets/icons/tool_web.svg index 6250a9f05ab53d2bc364dc7520d10ee319f29f1f..288b54c432dcb4336161d779771c04b045eb6b4f 100644 --- a/assets/icons/tool_web.svg +++ b/assets/icons/tool_web.svg @@ -1,5 +1,5 @@ - - - + + + diff --git a/assets/icons/trash.svg b/assets/icons/trash.svg index 1322e90f9fdc1fad9901febff0f71a938621f900..4a9e9add021be23727ec7c8c69a98a593a6bece7 100644 --- a/assets/icons/trash.svg +++ b/assets/icons/trash.svg @@ -1,5 +1,5 @@ - - - + + + diff --git a/assets/icons/triangle.svg b/assets/icons/triangle.svg index 0ecf071e2458d731a473a23e44e4d2d1ade636f3..c36d382e73ced53c8bc67fca6e0f94908ec66e1f 100644 --- a/assets/icons/triangle.svg +++ b/assets/icons/triangle.svg @@ -1,3 +1,3 @@ - - + + diff --git a/assets/icons/triangle_right.svg b/assets/icons/triangle_right.svg index 2c78a316f7cd82d2fa30a0080965e6bb961b9a1c..bb82d8e637c1b4b9e344e01fcdd8d7e5ffc1598c 100644 --- a/assets/icons/triangle_right.svg +++ b/assets/icons/triangle_right.svg @@ -1 +1,3 @@ - + + + diff --git a/assets/icons/undo.svg b/assets/icons/undo.svg index 907cc77195f1e1bd7de75370607b02ab42e7a44c..c714b58747e950ab75d3a02be7eebfe7cd83eda1 100644 --- a/assets/icons/undo.svg +++ b/assets/icons/undo.svg @@ -1 +1 @@ - + diff --git a/assets/icons/update.svg b/assets/icons/update.svg deleted file mode 100644 index b529b2b08b42a17027566a47d20f8ae93d61ae35..0000000000000000000000000000000000000000 --- a/assets/icons/update.svg +++ /dev/null @@ -1,8 +0,0 @@ - - - diff --git a/assets/icons/user_check.svg b/assets/icons/user_check.svg index e5f13feeb4d72a2b289566acb952eda509553baf..ee32a525909a738ffa21b75a2a9690fa5ee8dcaf 100644 --- a/assets/icons/user_check.svg +++ b/assets/icons/user_check.svg @@ -1 +1 @@ - + diff --git a/assets/icons/user_group.svg b/assets/icons/user_group.svg index ac1f7bdc633190f88b202d9e5ae7430af225aecd..30d2e5a7eac519246fe4ee176d107bb2b8cd6598 100644 --- a/assets/icons/user_group.svg +++ b/assets/icons/user_group.svg @@ -1,5 +1,5 @@ - - - + + + diff --git a/assets/icons/user_round_pen.svg b/assets/icons/user_round_pen.svg index e25bf104693bd5835a1d9065c4cb65cbad164c06..e684fd1a2006e5435e93e2b6db27d5584ce41090 100644 --- a/assets/icons/user_round_pen.svg +++ b/assets/icons/user_round_pen.svg @@ -1 +1 @@ - + diff --git a/assets/icons/visible.svg b/assets/icons/visible.svg deleted file mode 100644 index 0a7e65d60d7ee8f8a6becebdbec6ae6a381d25e8..0000000000000000000000000000000000000000 --- a/assets/icons/visible.svg +++ /dev/null @@ -1 +0,0 @@ - diff --git a/assets/icons/wand.svg b/assets/icons/wand.svg deleted file mode 100644 index a6704b1c428324338df86a4f1f0f778c67ec9ac0..0000000000000000000000000000000000000000 --- a/assets/icons/wand.svg +++ /dev/null @@ -1 +0,0 @@ - diff --git a/assets/icons/warning.svg b/assets/icons/warning.svg index c48a575a90dc224db13a4b7fda8629fb2cd464f4..5af37dab9db2c07c4d6ff505d03e62348d078f53 100644 --- a/assets/icons/warning.svg +++ b/assets/icons/warning.svg @@ -1 +1 @@ - + diff --git a/assets/icons/whole_word.svg b/assets/icons/whole_word.svg index beca4cbe82b760f3e5d218a1158424cc68f9ab4b..ce0d1606c8552f002d7bf58ad7a778a3be9561af 100644 --- a/assets/icons/whole_word.svg +++ b/assets/icons/whole_word.svg @@ -1,5 +1 @@ - - - - - + diff --git a/assets/icons/x.svg 
b/assets/icons/x.svg deleted file mode 100644 index 5d91a9edd997cbb85ee636958d244006f0b955ce..0000000000000000000000000000000000000000 --- a/assets/icons/x.svg +++ /dev/null @@ -1,3 +0,0 @@ - - - diff --git a/assets/icons/x_circle.svg b/assets/icons/x_circle.svg index 593629beee2510012638795583327b7877938ba9..8807e5fa1fe6912982ab271744f17d13026c13ad 100644 --- a/assets/icons/x_circle.svg +++ b/assets/icons/x_circle.svg @@ -1,4 +1 @@ - - - - + diff --git a/assets/icons/x_circle_filled.svg b/assets/icons/x_circle_filled.svg new file mode 100644 index 0000000000000000000000000000000000000000..52215acda8a6b7fc57820fa90f6ed405e6af637c --- /dev/null +++ b/assets/icons/x_circle_filled.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/zed_agent.svg b/assets/icons/zed_agent.svg new file mode 100644 index 0000000000000000000000000000000000000000..0c80e22c51233fff40b7605d0835b463786b4e84 --- /dev/null +++ b/assets/icons/zed_agent.svg @@ -0,0 +1,27 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/assets/icons/zed_assistant.svg b/assets/icons/zed_assistant.svg index d21252de8c234611ddd41caff287e3fc0d540ed3..812277a100b7e6e4ad44de357fc3556b686a90a0 100644 --- a/assets/icons/zed_assistant.svg +++ b/assets/icons/zed_assistant.svg @@ -1,5 +1,5 @@ - - - + + + diff --git a/assets/icons/zed_assistant_filled.svg b/assets/icons/zed_assistant_filled.svg deleted file mode 100644 index 8d16fd98499b237319eab48dc34c09e9354cd402..0000000000000000000000000000000000000000 --- a/assets/icons/zed_assistant_filled.svg +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/assets/icons/zed_burn_mode.svg b/assets/icons/zed_burn_mode.svg index 544368d8e06a3b06bb476f07c97e1584a8285cfa..cad6ed666be5edbb1b2d6dced7d0e8990ac90d68 100644 --- a/assets/icons/zed_burn_mode.svg +++ b/assets/icons/zed_burn_mode.svg @@ -1,3 +1,3 @@ - - + + diff --git a/assets/icons/zed_burn_mode_on.svg b/assets/icons/zed_burn_mode_on.svg index 94230b6fd6638748b765d613775de978b5613ee7..10e0e42b1302f5323adee6724c3d43cc59a82d31 100644 --- a/assets/icons/zed_burn_mode_on.svg +++ b/assets/icons/zed_burn_mode_on.svg @@ -1,13 +1 @@ - - - - - - - - - - - - - + diff --git a/assets/icons/zed_mcp_custom.svg b/assets/icons/zed_mcp_custom.svg index 6410a26fcade9d5be5dd494eb24efa6b5985724a..feff2d7d34fb71d4d9064ae0cf5075216f969f75 100644 --- a/assets/icons/zed_mcp_custom.svg +++ b/assets/icons/zed_mcp_custom.svg @@ -1,4 +1,4 @@ - + diff --git a/assets/icons/zed_mcp_extension.svg b/assets/icons/zed_mcp_extension.svg index 996e0c1920c206f1d4ace11179069f77bc103300..00117efcf4e20cb368824ee248b671caedad0b3b 100644 --- a/assets/icons/zed_mcp_extension.svg +++ b/assets/icons/zed_mcp_extension.svg @@ -1,4 +1,4 @@ - + diff --git a/assets/icons/zed_predict.svg b/assets/icons/zed_predict.svg index 79fd8c8fc132d7a2d7bb966086a3ff62c819c5b0..605a0584d52b3163158610ae9a96fbe96fc60806 100644 --- a/assets/icons/zed_predict.svg +++ b/assets/icons/zed_predict.svg @@ -1,5 +1,5 @@ - - - + + + diff --git a/assets/icons/zed_predict_down.svg b/assets/icons/zed_predict_down.svg index 4532ad7e26cab76bc4e52a68ea5c766a1ffdca81..79eef9b0b4ad3bc2678371120bfbf5154cdd32fd 100644 --- a/assets/icons/zed_predict_down.svg +++ b/assets/icons/zed_predict_down.svg @@ -1,5 +1,5 @@ - - - + + + diff --git a/assets/icons/zed_predict_error.svg b/assets/icons/zed_predict_error.svg index b2dc339fe954f4bbb146d792bc653e2c402a3818..6f75326179bf3a6663ff28bfaecb80bf29878d42 100644 --- a/assets/icons/zed_predict_error.svg +++ b/assets/icons/zed_predict_error.svg @@ -1,4 +1,4 @@ - - + + diff --git 
a/assets/icons/zed_predict_up.svg b/assets/icons/zed_predict_up.svg index 61ec143022b4f785affa5183d549a750bd741ab2..f77001e4bddf3094532708ef0313e8b938434781 100644 --- a/assets/icons/zed_predict_up.svg +++ b/assets/icons/zed_predict_up.svg @@ -1,5 +1,5 @@ - - - + + + diff --git a/assets/icons/zed_x_copilot.svg b/assets/icons/zed_x_copilot.svg deleted file mode 100644 index d024678c500640dc53eadeea0987e9c20070629b..0000000000000000000000000000000000000000 --- a/assets/icons/zed_x_copilot.svg +++ /dev/null @@ -1,14 +0,0 @@ - - - - - - - - - - - - - - diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index c436b1a8fb5a6da4bf7dc086ae01b280062935e0..955e68f5a9f76483456628604df6e52c24dc2e1a 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -138,7 +138,7 @@ "find": "buffer_search::Deploy", "ctrl-f": "buffer_search::Deploy", "ctrl-h": "buffer_search::DeployReplace", - "ctrl->": "assistant::QuoteSelection", + "ctrl->": "agent::QuoteSelection", "ctrl-<": "assistant::InsertIntoEditor", "ctrl-alt-e": "editor::SelectEnclosingSymbol", "ctrl-shift-backspace": "editor::GoToPreviousChange", @@ -239,8 +239,9 @@ "ctrl-shift-a": "agent::ToggleContextPicker", "ctrl-shift-j": "agent::ToggleNavigationMenu", "ctrl-shift-i": "agent::ToggleOptionsMenu", + "ctrl-alt-shift-n": "agent::ToggleNewThreadMenu", "shift-alt-escape": "agent::ExpandMessageEditor", - "ctrl->": "assistant::QuoteSelection", + "ctrl->": "agent::QuoteSelection", "ctrl-alt-e": "agent::RemoveAllContext", "ctrl-shift-e": "project_panel::ToggleFocus", "ctrl-shift-enter": "agent::ContinueThread", @@ -326,12 +327,20 @@ } }, { - "context": "AcpThread > Editor", + "context": "AcpThread > Editor && !use_modifier_to_send", "use_key_equivalents": true, "bindings": { "enter": "agent::Chat", - "up": "agent::PreviousHistoryMessage", - "down": "agent::NextHistoryMessage", + "shift-ctrl-r": "agent::OpenAgentDiff", + "ctrl-shift-y": "agent::KeepAll", + "ctrl-shift-n": "agent::RejectAll" + } + }, + { + "context": "AcpThread > Editor && use_modifier_to_send", + "use_key_equivalents": true, + "bindings": { + "ctrl-enter": "agent::Chat", "shift-ctrl-r": "agent::OpenAgentDiff", "ctrl-shift-y": "agent::KeepAll", "ctrl-shift-n": "agent::RejectAll" @@ -1187,7 +1196,8 @@ "ctrl-2": "onboarding::ActivateEditingPage", "ctrl-3": "onboarding::ActivateAISetupPage", "ctrl-escape": "onboarding::Finish", - "alt-tab": "onboarding::SignIn" + "alt-tab": "onboarding::SignIn", + "alt-shift-a": "onboarding::OpenAccount" } } ] diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 960bac14797692f7db9336d1c90b7156ee952688..8b18299a911f90507e78e06d4dff411b37276bb5 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -162,7 +162,7 @@ "cmd-alt-f": "buffer_search::DeployReplace", "cmd-alt-l": ["buffer_search::Deploy", { "selection_search_enabled": true }], "cmd-e": ["buffer_search::Deploy", { "focus": false }], - "cmd->": "assistant::QuoteSelection", + "cmd->": "agent::QuoteSelection", "cmd-<": "assistant::InsertIntoEditor", "cmd-alt-e": "editor::SelectEnclosingSymbol", "alt-enter": "editor::OpenSelectionsInMultibuffer" @@ -279,8 +279,9 @@ "cmd-shift-a": "agent::ToggleContextPicker", "cmd-shift-j": "agent::ToggleNavigationMenu", "cmd-shift-i": "agent::ToggleOptionsMenu", + "cmd-alt-shift-n": "agent::ToggleNewThreadMenu", "shift-alt-escape": "agent::ExpandMessageEditor", - "cmd->": "assistant::QuoteSelection", + "cmd->": "agent::QuoteSelection", 
"cmd-alt-e": "agent::RemoveAllContext", "cmd-shift-e": "project_panel::ToggleFocus", "cmd-ctrl-b": "agent::ToggleBurnMode", @@ -378,12 +379,20 @@ } }, { - "context": "AcpThread > Editor", + "context": "AcpThread > Editor && !use_modifier_to_send", "use_key_equivalents": true, "bindings": { "enter": "agent::Chat", - "up": "agent::PreviousHistoryMessage", - "down": "agent::NextHistoryMessage", + "shift-ctrl-r": "agent::OpenAgentDiff", + "cmd-shift-y": "agent::KeepAll", + "cmd-shift-n": "agent::RejectAll" + } + }, + { + "context": "AcpThread > Editor && use_modifier_to_send", + "use_key_equivalents": true, + "bindings": { + "cmd-enter": "agent::Chat", "shift-ctrl-r": "agent::OpenAgentDiff", "cmd-shift-y": "agent::KeepAll", "cmd-shift-n": "agent::RejectAll" @@ -1289,7 +1298,8 @@ "cmd-2": "onboarding::ActivateEditingPage", "cmd-3": "onboarding::ActivateAISetupPage", "cmd-escape": "onboarding::Finish", - "alt-tab": "onboarding::SignIn" + "alt-tab": "onboarding::SignIn", + "alt-shift-a": "onboarding::OpenAccount" } } ] diff --git a/assets/keymaps/linux/cursor.json b/assets/keymaps/linux/cursor.json index 1c381b0cf05531e7fd5743d71be1b4d662bb4c0d..2e27158e1167f0840cadfb0d86dc06614f6076c6 100644 --- a/assets/keymaps/linux/cursor.json +++ b/assets/keymaps/linux/cursor.json @@ -17,8 +17,8 @@ "bindings": { "ctrl-i": "agent::ToggleFocus", "ctrl-shift-i": "agent::ToggleFocus", - "ctrl-shift-l": "assistant::QuoteSelection", // In cursor uses "Ask" mode - "ctrl-l": "assistant::QuoteSelection", // In cursor uses "Agent" mode + "ctrl-shift-l": "agent::QuoteSelection", // In cursor uses "Ask" mode + "ctrl-l": "agent::QuoteSelection", // In cursor uses "Agent" mode "ctrl-k": "assistant::InlineAssist", "ctrl-shift-k": "assistant::InsertIntoEditor" } diff --git a/assets/keymaps/macos/cursor.json b/assets/keymaps/macos/cursor.json index fdf9c437cf395c074e42ae9c9dc53c1aa6ff66c2..1d723bd75bb788aa1ea63335f9fa555cb50d2df0 100644 --- a/assets/keymaps/macos/cursor.json +++ b/assets/keymaps/macos/cursor.json @@ -17,8 +17,8 @@ "bindings": { "cmd-i": "agent::ToggleFocus", "cmd-shift-i": "agent::ToggleFocus", - "cmd-shift-l": "assistant::QuoteSelection", // In cursor uses "Ask" mode - "cmd-l": "assistant::QuoteSelection", // In cursor uses "Agent" mode + "cmd-shift-l": "agent::QuoteSelection", // In cursor uses "Ask" mode + "cmd-l": "agent::QuoteSelection", // In cursor uses "Agent" mode "cmd-k": "assistant::InlineAssist", "cmd-shift-k": "assistant::InsertIntoEditor" } diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 57edb1e4c1c534ce90b6c5534d4ceebddd4f9a38..be6d34a1342b6fabe0561643c74034d3c99a04b6 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -58,6 +58,8 @@ "[ space": "vim::InsertEmptyLineAbove", "[ e": "editor::MoveLineUp", "] e": "editor::MoveLineDown", + "[ f": "workspace::FollowNextCollaborator", + "] f": "workspace::FollowNextCollaborator", // Word motions "w": "vim::NextWordStart", @@ -333,10 +335,14 @@ "ctrl-x ctrl-c": "editor::ShowEditPrediction", // zed specific "ctrl-x ctrl-l": "editor::ToggleCodeActions", // zed specific "ctrl-x ctrl-z": "editor::Cancel", + "ctrl-x ctrl-e": "vim::LineDown", + "ctrl-x ctrl-y": "vim::LineUp", "ctrl-w": "editor::DeleteToPreviousWordStart", "ctrl-u": "editor::DeleteToBeginningOfLine", "ctrl-t": "vim::Indent", "ctrl-d": "vim::Outdent", + "ctrl-y": "vim::InsertFromAbove", + "ctrl-e": "vim::InsertFromBelow", "ctrl-k": ["vim::PushDigraph", {}], "ctrl-v": ["vim::PushLiteral", {}], "ctrl-shift-v": "editor::Paste", // note: this is *very* 
similar to ctrl-v in vim, but ctrl-shift-v on linux is the typical shortcut for paste when ctrl-v is already in use. @@ -386,7 +392,7 @@ "right": "vim::WrappingRight", "h": "vim::WrappingLeft", "l": "vim::WrappingRight", - "y": "editor::Copy", + "y": "vim::HelixYank", "alt-;": "vim::OtherEnd", "ctrl-r": "vim::Redo", "f": ["vim::PushFindForward", { "before": false, "multiline": true }], @@ -403,6 +409,7 @@ "g w": "vim::PushRewrap", "insert": "vim::InsertBefore", "alt-.": "vim::RepeatFind", + "alt-s": ["editor::SplitSelectionIntoLines", { "keep_selections": true }], // tree-sitter related commands "[ x": "editor::SelectLargerSyntaxNode", "] x": "editor::SelectSmallerSyntaxNode", diff --git a/assets/settings/default.json b/assets/settings/default.json index 9c579b858db166001cb564840ae4164109d96fee..c290baf0038d1b731f041e9c828746758bf9ffe3 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -28,7 +28,9 @@ "edit_prediction_provider": "zed" }, // The name of a font to use for rendering text in the editor - "buffer_font_family": "Zed Plex Mono", + // ".ZedMono" currently aliases to Lilex + // but this may change in the future. + "buffer_font_family": ".ZedMono", // Set the buffer text's font fallbacks, this will be merged with // the platform's default fallbacks. "buffer_font_fallbacks": null, @@ -54,7 +56,9 @@ "buffer_line_height": "comfortable", // The name of a font to use for rendering text in the UI // You can set this to ".SystemUIFont" to use the system font - "ui_font_family": "Zed Plex Sans", + // ".ZedSans" currently aliases to "IBM Plex Sans", but this may + // change in the future + "ui_font_family": ".ZedSans", // Set the UI's font fallbacks, this will be merged with the platform's // default font fallbacks. "ui_font_fallbacks": null, @@ -67,8 +71,8 @@ "ui_font_weight": 400, // The default font size for text in the UI "ui_font_size": 16, - // The default font size for text in the agent panel - "agent_font_size": 16, + // The default font size for text in the agent panel. Falls back to the UI font size if unset. + "agent_font_size": null, // How much to fade out unused code. "unnecessary_code_fade": 0.3, // Active pane styling settings. @@ -82,10 +86,10 @@ // Layout mode of the bottom dock. Defaults to "contained" // choices: contained, full, left_aligned, right_aligned "bottom_dock_layout": "contained", - // The direction that you want to split panes horizontally. Defaults to "up" - "pane_split_direction_horizontal": "up", - // The direction that you want to split panes vertically. Defaults to "left" - "pane_split_direction_vertical": "left", + // The direction that you want to split panes horizontally. Defaults to "down" + "pane_split_direction_horizontal": "down", + // The direction that you want to split panes vertically. Defaults to "right" + "pane_split_direction_vertical": "right", // Centered layout related settings. "centered_layout": { // The relative width of the left padding of the central pane from the @@ -282,6 +286,8 @@ // bracket, brace, single or double quote characters. // For example, when you select text and type (, Zed will surround the text with (). "use_auto_surround": true, + /// Whether indentation should be adjusted based on the context whilst typing. + "auto_indent": true, // Whether indentation of pasted content should be adjusted based on the context. "auto_indent_on_paste": true, // Controls how the editor handles the autoclosed characters. 
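(Aside, not part of the patch: the default.json hunks above rename the font defaults to the ".ZedMono"/".ZedSans" aliases, let agent_font_size fall back to the UI font size, flip the default pane split directions, and add the auto_indent flag. A minimal sketch of a user settings.json that overrides these defaults; the keys are the ones shown in this file, the concrete values are illustrative assumptions only:)

{
  // Pin concrete fonts instead of the ".ZedMono" / ".ZedSans" aliases
  "buffer_font_family": "Lilex",
  "ui_font_family": "IBM Plex Sans",
  // Keep the previous split directions
  "pane_split_direction_horizontal": "up",
  "pane_split_direction_vertical": "left",
  // Give the agent panel an explicit font size instead of inheriting the UI size
  "agent_font_size": 18,
  // Turn off context-aware indentation while typing
  "auto_indent": false
}
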
@@ -711,7 +717,7 @@ // Can be 'never', 'always', or 'when_in_call', // or a boolean (interpreted as 'never'/'always'). "button": "when_in_call", - // Where to the chat panel. Can be 'left' or 'right'. + // Where to dock the chat panel. Can be 'left' or 'right'. "dock": "right", // Default width of the chat panel. "default_width": 240 @@ -719,7 +725,7 @@ "git_panel": { // Whether to show the git panel button in the status bar. "button": true, - // Where to show the git panel. Can be 'left' or 'right'. + // Where to dock the git panel. Can be 'left' or 'right'. "dock": "left", // Default width of the git panel. "default_width": 360, @@ -883,11 +889,6 @@ }, // The settings for slash commands. "slash_commands": { - // Settings for the `/docs` slash command. - "docs": { - // Whether `/docs` is enabled. - "enabled": false - }, // Settings for the `/project` slash command. "project": { // Whether `/project` is enabled. @@ -1210,7 +1211,18 @@ // Any addition to this list will be merged with the default list. // Globs are matched relative to the worktree root, // except when starting with a slash (/) or equivalent in Windows. - "disabled_globs": ["**/.env*", "**/*.pem", "**/*.key", "**/*.cert", "**/*.crt", "**/.dev.vars", "**/secrets.yml"], + "disabled_globs": [ + "**/.env*", + "**/*.pem", + "**/*.key", + "**/*.cert", + "**/*.crt", + "**/.dev.vars", + "**/secrets.yml", + "**/.zed/settings.json", // zed project settings + "/**/zed/settings.json", // zed user settings + "/**/zed/keymap.json" + ], // When to show edit predictions previews in buffer. // This setting takes two possible values: // 1. Display predictions inline when there are no language server completions available. @@ -1241,7 +1253,9 @@ // Status bar-related settings. "status_bar": { // Whether to show the active language button in the status bar. - "active_language_button": true + "active_language_button": true, + // Whether to show the cursor position button in the status bar. + "cursor_position_button": true }, // Settings specific to the terminal "terminal": { @@ -1391,7 +1405,7 @@ // "font_size": 15, // Set the terminal's font family. If this option is not included, // the terminal will default to matching the buffer's font family. - // "font_family": "Zed Plex Mono", + // "font_family": ".ZedMono", // Set the terminal's font fallbacks. If this option is not included, // the terminal will default to matching the buffer's font fallbacks. 
// This will be merged with the platform's default font fallbacks diff --git a/assets/themes/one/one.json b/assets/themes/one/one.json index 384ad28272f0239bc18b59ddb43b290aafe5920b..23ebbcc67efaa9ca45748a5726ac1fd72488c451 100644 --- a/assets/themes/one/one.json +++ b/assets/themes/one/one.json @@ -86,9 +86,9 @@ "terminal.ansi.blue": "#74ade8ff", "terminal.ansi.bright_blue": "#385378ff", "terminal.ansi.dim_blue": "#bed5f4ff", - "terminal.ansi.magenta": "#be5046ff", - "terminal.ansi.bright_magenta": "#5e2b26ff", - "terminal.ansi.dim_magenta": "#e6a79eff", + "terminal.ansi.magenta": "#b477cfff", + "terminal.ansi.bright_magenta": "#d6b4e4ff", + "terminal.ansi.dim_magenta": "#612a79ff", "terminal.ansi.cyan": "#6eb4bfff", "terminal.ansi.bright_cyan": "#3a565bff", "terminal.ansi.dim_cyan": "#b9d9dfff", diff --git a/crates/acp_thread/Cargo.toml b/crates/acp_thread/Cargo.toml index 1831c7e4733a58a889d082b8276acecc8dd186bb..eab756db51885b8b2e2797bbf0303937f19fefb9 100644 --- a/crates/acp_thread/Cargo.toml +++ b/crates/acp_thread/Cargo.toml @@ -13,27 +13,35 @@ path = "src/acp_thread.rs" doctest = false [features] -test-support = ["gpui/test-support", "project/test-support"] +test-support = ["gpui/test-support", "project/test-support", "dep:parking_lot"] [dependencies] +action_log.workspace = true agent-client-protocol.workspace = true anyhow.workspace = true -assistant_tool.workspace = true buffer_diff.workspace = true +collections.workspace = true editor.workspace = true +file_icons.workspace = true futures.workspace = true gpui.workspace = true itertools.workspace = true language.workspace = true language_model.workspace = true markdown.workspace = true +parking_lot = { workspace = true, optional = true } project.workspace = true +prompt_store.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true smol.workspace = true +terminal.workspace = true ui.workspace = true +url.workspace = true util.workspace = true +uuid.workspace = true +watch.workspace = true workspace-hack.workspace = true [dev-dependencies] diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index 443375a51b08c510ab8f34abef297c530abfcb3e..61bc50576abd8a53433d5738acb98f245ec92764 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -1,86 +1,68 @@ mod connection; -pub use connection::*; +mod diff; +mod mention; +mod terminal; +use collections::HashSet; +pub use connection::*; +pub use diff::*; +use language::language_settings::FormatOnSave; +pub use mention::*; +use project::lsp_store::{FormatTrigger, LspFormatTarget}; +use serde::{Deserialize, Serialize}; +pub use terminal::*; + +use action_log::ActionLog; use agent_client_protocol as acp; -use anyhow::{Context as _, Result}; -use assistant_tool::ActionLog; -use buffer_diff::BufferDiff; -use editor::{Bias, MultiBuffer, PathKey}; +use anyhow::{Context as _, Result, anyhow}; +use editor::Bias; use futures::{FutureExt, channel::oneshot, future::BoxFuture}; -use gpui::{AppContext, Context, Entity, EventEmitter, SharedString, Task}; +use gpui::{AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task, WeakEntity}; use itertools::Itertools; -use language::{ - Anchor, Buffer, BufferSnapshot, Capability, LanguageRegistry, OffsetRangeExt as _, Point, - text_diff, -}; +use language::{Anchor, Buffer, BufferSnapshot, LanguageRegistry, Point, ToPoint, text_diff}; use markdown::Markdown; -use project::{AgentLocation, Project}; +use project::{AgentLocation, Project, 
git_store::GitStoreCheckpoint}; use std::collections::HashMap; use std::error::Error; -use std::fmt::Formatter; +use std::fmt::{Formatter, Write}; +use std::ops::Range; use std::process::ExitStatus; use std::rc::Rc; -use std::{ - fmt::Display, - mem, - path::{Path, PathBuf}, - sync::Arc, -}; +use std::time::{Duration, Instant}; +use std::{fmt::Display, mem, path::PathBuf, sync::Arc}; use ui::App; use util::ResultExt; #[derive(Debug)] pub struct UserMessage { + pub id: Option, pub content: ContentBlock, -} - -impl UserMessage { - pub fn from_acp( - message: impl IntoIterator, - language_registry: Arc, - cx: &mut App, - ) -> Self { - let mut content = ContentBlock::Empty; - for chunk in message { - content.append(chunk, &language_registry, cx) - } - Self { content: content } - } - - fn to_markdown(&self, cx: &App) -> String { - format!("## User\n\n{}\n\n", self.content.to_markdown(cx)) - } + pub chunks: Vec, + pub checkpoint: Option, } #[derive(Debug)] -pub struct MentionPath<'a>(&'a Path); - -impl<'a> MentionPath<'a> { - const PREFIX: &'static str = "@file:"; - - pub fn new(path: &'a Path) -> Self { - MentionPath(path) - } - - pub fn try_parse(url: &'a str) -> Option { - let path = url.strip_prefix(Self::PREFIX)?; - Some(MentionPath(Path::new(path))) - } - - pub fn path(&self) -> &Path { - self.0 - } +pub struct Checkpoint { + git_checkpoint: GitStoreCheckpoint, + pub show: bool, } -impl Display for MentionPath<'_> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!( - f, - "[@{}]({}{})", - self.0.file_name().unwrap_or_default().display(), - Self::PREFIX, - self.0.display() - ) +impl UserMessage { + fn to_markdown(&self, cx: &App) -> String { + let mut markdown = String::new(); + if self + .checkpoint + .as_ref() + .is_some_and(|checkpoint| checkpoint.show) + { + writeln!(markdown, "## User (checkpoint)").unwrap(); + } else { + writeln!(markdown, "## User").unwrap(); + } + writeln!(markdown).unwrap(); + writeln!(markdown, "{}", self.content.to_markdown(cx)).unwrap(); + writeln!(markdown).unwrap(); + markdown } } @@ -132,7 +114,7 @@ pub enum AgentThreadEntry { } impl AgentThreadEntry { - fn to_markdown(&self, cx: &App) -> String { + pub fn to_markdown(&self, cx: &App) -> String { match self { Self::UserMessage(message) => message.to_markdown(cx), Self::AssistantMessage(message) => message.to_markdown(cx), @@ -140,7 +122,15 @@ impl AgentThreadEntry { } } - pub fn diffs(&self) -> impl Iterator { + pub fn user_message(&self) -> Option<&UserMessage> { + if let AgentThreadEntry::UserMessage(message) = self { + Some(message) + } else { + None + } + } + + pub fn diffs(&self) -> impl Iterator> { if let AgentThreadEntry::ToolCall(call) = self { itertools::Either::Left(call.diffs()) } else { @@ -148,9 +138,25 @@ impl AgentThreadEntry { } } - pub fn locations(&self) -> Option<&[acp::ToolCallLocation]> { - if let AgentThreadEntry::ToolCall(ToolCall { locations, .. }) = self { - Some(locations) + pub fn terminals(&self) -> impl Iterator> { + if let AgentThreadEntry::ToolCall(call) = self { + itertools::Either::Left(call.terminals()) + } else { + itertools::Either::Right(std::iter::empty()) + } + } + + pub fn location(&self, ix: usize) -> Option<(acp::ToolCallLocation, AgentLocation)> { + if let AgentThreadEntry::ToolCall(ToolCall { + locations, + resolved_locations, + .. 
+ }) = self + { + Some(( + locations.get(ix)?.clone(), + resolved_locations.get(ix)?.clone()?, + )) } else { None } @@ -165,6 +171,7 @@ pub struct ToolCall { pub content: Vec, pub status: ToolCallStatus, pub locations: Vec, + pub resolved_locations: Vec>, pub raw_input: Option, pub raw_output: Option, } @@ -193,13 +200,14 @@ impl ToolCall { .map(|content| ToolCallContent::from_acp(content, language_registry.clone(), cx)) .collect(), locations: tool_call.locations, + resolved_locations: Vec::default(), status, raw_input: tool_call.raw_input, raw_output: tool_call.raw_output, } } - fn update( + fn update_fields( &mut self, fields: acp::ToolCallUpdateFields, language_registry: Arc, @@ -220,7 +228,7 @@ impl ToolCall { } if let Some(status) = status { - self.status = ToolCallStatus::Allowed { status }; + self.status = status.into(); } if let Some(title) = title { @@ -245,14 +253,31 @@ impl ToolCall { } if let Some(raw_output) = raw_output { + if self.content.is_empty() + && let Some(markdown) = markdown_for_raw_output(&raw_output, &language_registry, cx) + { + self.content + .push(ToolCallContent::ContentBlock(ContentBlock::Markdown { + markdown, + })); + } self.raw_output = Some(raw_output); } } - pub fn diffs(&self) -> impl Iterator { + pub fn diffs(&self) -> impl Iterator> { + self.content.iter().filter_map(|content| match content { + ToolCallContent::Diff(diff) => Some(diff), + ToolCallContent::ContentBlock(_) => None, + ToolCallContent::Terminal(_) => None, + }) + } + + pub fn terminals(&self) -> impl Iterator> { self.content.iter().filter_map(|content| match content { - ToolCallContent::ContentBlock { .. } => None, - ToolCallContent::Diff { diff } => Some(diff), + ToolCallContent::Terminal(terminal) => Some(terminal), + ToolCallContent::ContentBlock(_) => None, + ToolCallContent::Diff(_) => None, }) } @@ -268,34 +293,101 @@ impl ToolCall { } markdown } + + async fn resolve_location( + location: acp::ToolCallLocation, + project: WeakEntity, + cx: &mut AsyncApp, + ) -> Option { + let buffer = project + .update(cx, |project, cx| { + project + .project_path_for_absolute_path(&location.path, cx) + .map(|path| project.open_buffer(path, cx)) + }) + .ok()??; + let buffer = buffer.await.log_err()?; + let position = buffer + .update(cx, |buffer, _| { + if let Some(row) = location.line { + let snapshot = buffer.snapshot(); + let column = snapshot.indent_size_for_line(row).len; + let point = snapshot.clip_point(Point::new(row, column), Bias::Left); + snapshot.anchor_before(point) + } else { + Anchor::MIN + } + }) + .ok()?; + + Some(AgentLocation { + buffer: buffer.downgrade(), + position, + }) + } + + fn resolve_locations( + &self, + project: Entity, + cx: &mut App, + ) -> Task>> { + let locations = self.locations.clone(); + project.update(cx, |_, cx| { + cx.spawn(async move |project, cx| { + let mut new_locations = Vec::new(); + for location in locations { + new_locations.push(Self::resolve_location(location, project.clone(), cx).await); + } + new_locations + }) + }) + } } #[derive(Debug)] pub enum ToolCallStatus { + /// The tool call hasn't started running yet, but we start showing it to + /// the user. + Pending, + /// The tool call is waiting for confirmation from the user. WaitingForConfirmation { options: Vec, respond_tx: oneshot::Sender, }, - Allowed { - status: acp::ToolCallStatus, - }, + /// The tool call is currently running. + InProgress, + /// The tool call completed successfully. + Completed, + /// The tool call failed. + Failed, + /// The user rejected the tool call. 
Rejected, + /// The user canceled generation so the tool call was canceled. Canceled, } +impl From for ToolCallStatus { + fn from(status: acp::ToolCallStatus) -> Self { + match status { + acp::ToolCallStatus::Pending => Self::Pending, + acp::ToolCallStatus::InProgress => Self::InProgress, + acp::ToolCallStatus::Completed => Self::Completed, + acp::ToolCallStatus::Failed => Self::Failed, + } + } +} + impl Display for ToolCallStatus { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { write!( f, "{}", match self { + ToolCallStatus::Pending => "Pending", ToolCallStatus::WaitingForConfirmation { .. } => "Waiting for confirmation", - ToolCallStatus::Allowed { status } => match status { - acp::ToolCallStatus::Pending => "Pending", - acp::ToolCallStatus::InProgress => "In Progress", - acp::ToolCallStatus::Completed => "Completed", - acp::ToolCallStatus::Failed => "Failed", - }, + ToolCallStatus::InProgress => "In Progress", + ToolCallStatus::Completed => "Completed", + ToolCallStatus::Failed => "Failed", ToolCallStatus::Rejected => "Rejected", ToolCallStatus::Canceled => "Canceled", } @@ -307,6 +399,7 @@ impl Display for ToolCallStatus { pub enum ContentBlock { Empty, Markdown { markdown: Entity }, + ResourceLink { resource_link: acp::ResourceLink }, } impl ContentBlock { @@ -338,43 +431,78 @@ impl ContentBlock { language_registry: &Arc, cx: &mut App, ) { - let new_content = match block { - acp::ContentBlock::Text(text_content) => text_content.text.clone(), - acp::ContentBlock::ResourceLink(resource_link) => { - if let Some(path) = resource_link.uri.strip_prefix("file://") { - format!("{}", MentionPath(path.as_ref())) - } else { - resource_link.uri.clone() - } - } - acp::ContentBlock::Image(_) - | acp::ContentBlock::Audio(_) - | acp::ContentBlock::Resource(_) => String::new(), - }; + if matches!(self, ContentBlock::Empty) + && let acp::ContentBlock::ResourceLink(resource_link) = block + { + *self = ContentBlock::ResourceLink { resource_link }; + return; + } + + let new_content = self.block_string_contents(block); match self { ContentBlock::Empty => { - *self = ContentBlock::Markdown { - markdown: cx.new(|cx| { - Markdown::new( - new_content.into(), - Some(language_registry.clone()), - None, - cx, - ) - }), - }; + *self = Self::create_markdown_block(new_content, language_registry, cx); } ContentBlock::Markdown { markdown } => { markdown.update(cx, |markdown, cx| markdown.append(&new_content, cx)); } + ContentBlock::ResourceLink { resource_link } => { + let existing_content = Self::resource_link_md(&resource_link.uri); + let combined = format!("{}\n{}", existing_content, new_content); + + *self = Self::create_markdown_block(combined, language_registry, cx); + } + } + } + + fn create_markdown_block( + content: String, + language_registry: &Arc, + cx: &mut App, + ) -> ContentBlock { + ContentBlock::Markdown { + markdown: cx + .new(|cx| Markdown::new(content.into(), Some(language_registry.clone()), None, cx)), + } + } + + fn block_string_contents(&self, block: acp::ContentBlock) -> String { + match block { + acp::ContentBlock::Text(text_content) => text_content.text, + acp::ContentBlock::ResourceLink(resource_link) => { + Self::resource_link_md(&resource_link.uri) + } + acp::ContentBlock::Resource(acp::EmbeddedResource { + resource: + acp::EmbeddedResourceResource::TextResourceContents(acp::TextResourceContents { + uri, + .. + }), + .. 
+ }) => Self::resource_link_md(&uri), + acp::ContentBlock::Image(image) => Self::image_md(&image), + acp::ContentBlock::Audio(_) | acp::ContentBlock::Resource(_) => String::new(), + } + } + + fn resource_link_md(uri: &str) -> String { + if let Some(uri) = MentionUri::parse(uri).log_err() { + uri.as_link().to_string() + } else { + uri.to_string() } } + fn image_md(_image: &acp::ImageContent) -> String { + "`Image`".into() + } + fn to_markdown<'a>(&'a self, cx: &'a App) -> &'a str { match self { ContentBlock::Empty => "", ContentBlock::Markdown { markdown } => markdown.read(cx).source(), + ContentBlock::ResourceLink { resource_link } => &resource_link.uri, } } @@ -382,14 +510,23 @@ impl ContentBlock { match self { ContentBlock::Empty => None, ContentBlock::Markdown { markdown } => Some(markdown), + ContentBlock::ResourceLink { .. } => None, + } + } + + pub fn resource_link(&self) -> Option<&acp::ResourceLink> { + match self { + ContentBlock::ResourceLink { resource_link } => Some(resource_link), + _ => None, } } } #[derive(Debug)] pub enum ToolCallContent { - ContentBlock { content: ContentBlock }, - Diff { diff: Diff }, + ContentBlock(ContentBlock), + Diff(Entity), + Terminal(Entity), } impl ToolCallContent { @@ -399,118 +536,75 @@ impl ToolCallContent { cx: &mut App, ) -> Self { match content { - acp::ToolCallContent::Content { content } => Self::ContentBlock { - content: ContentBlock::new(content, &language_registry, cx), - }, - acp::ToolCallContent::Diff { diff } => Self::Diff { - diff: Diff::from_acp(diff, language_registry, cx), - }, + acp::ToolCallContent::Content { content } => { + Self::ContentBlock(ContentBlock::new(content, &language_registry, cx)) + } + acp::ToolCallContent::Diff { diff } => Self::Diff(cx.new(|cx| { + Diff::finalized( + diff.path, + diff.old_text, + diff.new_text, + language_registry, + cx, + ) + })), } } pub fn to_markdown(&self, cx: &App) -> String { match self { - Self::ContentBlock { content } => content.to_markdown(cx).to_string(), - Self::Diff { diff } => diff.to_markdown(cx), + Self::ContentBlock(content) => content.to_markdown(cx).to_string(), + Self::Diff(diff) => diff.read(cx).to_markdown(cx), + Self::Terminal(terminal) => terminal.read(cx).to_markdown(cx), } } } -#[derive(Debug)] -pub struct Diff { - pub multibuffer: Entity, - pub path: PathBuf, - _task: Task>, +#[derive(Debug, PartialEq)] +pub enum ToolCallUpdate { + UpdateFields(acp::ToolCallUpdate), + UpdateDiff(ToolCallUpdateDiff), + UpdateTerminal(ToolCallUpdateTerminal), } -impl Diff { - pub fn from_acp( - diff: acp::Diff, - language_registry: Arc, - cx: &mut App, - ) -> Self { - let acp::Diff { - path, - old_text, - new_text, - } = diff; - - let multibuffer = cx.new(|_cx| MultiBuffer::without_headers(Capability::ReadOnly)); - - let new_buffer = cx.new(|cx| Buffer::local(new_text, cx)); - let old_buffer = cx.new(|cx| Buffer::local(old_text.unwrap_or("".into()), cx)); - let new_buffer_snapshot = new_buffer.read(cx).text_snapshot(); - let buffer_diff = cx.new(|cx| BufferDiff::new(&new_buffer_snapshot, cx)); - - let task = cx.spawn({ - let multibuffer = multibuffer.clone(); - let path = path.clone(); - async move |cx| { - let language = language_registry - .language_for_file_path(&path) - .await - .log_err(); - - new_buffer.update(cx, |buffer, cx| buffer.set_language(language.clone(), cx))?; - - let old_buffer_snapshot = old_buffer.update(cx, |buffer, cx| { - buffer.set_language(language, cx); - buffer.snapshot() - })?; +impl ToolCallUpdate { + fn id(&self) -> &acp::ToolCallId { + match self { + 
Self::UpdateFields(update) => &update.id, + Self::UpdateDiff(diff) => &diff.id, + Self::UpdateTerminal(terminal) => &terminal.id, + } + } +} - buffer_diff - .update(cx, |diff, cx| { - diff.set_base_text( - old_buffer_snapshot, - Some(language_registry), - new_buffer_snapshot, - cx, - ) - })? - .await?; +impl From for ToolCallUpdate { + fn from(update: acp::ToolCallUpdate) -> Self { + Self::UpdateFields(update) + } +} - multibuffer - .update(cx, |multibuffer, cx| { - let hunk_ranges = { - let buffer = new_buffer.read(cx); - let diff = buffer_diff.read(cx); - diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer, cx) - .map(|diff_hunk| diff_hunk.buffer_range.to_point(&buffer)) - .collect::>() - }; - - multibuffer.set_excerpts_for_path( - PathKey::for_buffer(&new_buffer, cx), - new_buffer.clone(), - hunk_ranges, - editor::DEFAULT_MULTIBUFFER_CONTEXT, - cx, - ); - multibuffer.add_diff(buffer_diff, cx); - }) - .log_err(); +impl From for ToolCallUpdate { + fn from(diff: ToolCallUpdateDiff) -> Self { + Self::UpdateDiff(diff) + } +} - anyhow::Ok(()) - } - }); +#[derive(Debug, PartialEq)] +pub struct ToolCallUpdateDiff { + pub id: acp::ToolCallId, + pub diff: Entity, +} - Self { - multibuffer, - path, - _task: task, - } +impl From for ToolCallUpdate { + fn from(terminal: ToolCallUpdateTerminal) -> Self { + Self::UpdateTerminal(terminal) } +} - fn to_markdown(&self, cx: &App) -> String { - let buffer_text = self - .multibuffer - .read(cx) - .all_buffers() - .iter() - .map(|buffer| buffer.read(cx).text()) - .join("\n"); - format!("Diff: {}\n```\n{}\n```\n", self.path.display(), buffer_text) - } +#[derive(Debug, PartialEq)] +pub struct ToolCallUpdateTerminal { + pub id: acp::ToolCallId, + pub terminal: Entity, } #[derive(Debug, Default)] @@ -572,6 +666,52 @@ impl PlanEntry { } } +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct TokenUsage { + pub max_tokens: u64, + pub used_tokens: u64, +} + +impl TokenUsage { + pub fn ratio(&self) -> TokenUsageRatio { + #[cfg(debug_assertions)] + let warning_threshold: f32 = std::env::var("ZED_THREAD_WARNING_THRESHOLD") + .unwrap_or("0.8".to_string()) + .parse() + .unwrap(); + #[cfg(not(debug_assertions))] + let warning_threshold: f32 = 0.8; + + // When the maximum is unknown because there is no selected model, + // avoid showing the token limit warning. 
+ if self.max_tokens == 0 { + TokenUsageRatio::Normal + } else if self.used_tokens >= self.max_tokens { + TokenUsageRatio::Exceeded + } else if self.used_tokens as f32 / self.max_tokens as f32 >= warning_threshold { + TokenUsageRatio::Warning + } else { + TokenUsageRatio::Normal + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum TokenUsageRatio { + Normal, + Warning, + Exceeded, +} + +#[derive(Debug, Clone)] +pub struct RetryStatus { + pub last_error: SharedString, + pub attempt: usize, + pub max_attempts: usize, + pub started_at: Instant, + pub duration: Duration, +} + pub struct AcpThread { title: SharedString, entries: Vec, @@ -582,15 +722,21 @@ pub struct AcpThread { send_task: Option>, connection: Rc, session_id: acp::SessionId, + token_usage: Option, } +#[derive(Debug)] pub enum AcpThreadEvent { NewEntry, + TitleUpdated, + TokenUsageUpdated, EntryUpdated(usize), + EntriesRemoved(Range), ToolAuthorizationRequired, + Retry(RetryStatus), Stopped, Error, - ServerExited(ExitStatus), + LoadError(LoadError), } impl EventEmitter for AcpThread {} @@ -604,20 +750,30 @@ pub enum ThreadStatus { #[derive(Debug, Clone)] pub enum LoadError { + NotInstalled { + error_message: SharedString, + install_message: SharedString, + install_command: String, + }, Unsupported { error_message: SharedString, upgrade_message: SharedString, upgrade_command: String, }, - Exited(i32), + Exited { + status: ExitStatus, + }, Other(SharedString), } impl Display for LoadError { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { - LoadError::Unsupported { error_message, .. } => write!(f, "{}", error_message), - LoadError::Exited(status) => write!(f, "Server exited with status {}", status), + LoadError::NotInstalled { error_message, .. } + | LoadError::Unsupported { error_message, .. } => { + write!(f, "{error_message}") + } + LoadError::Exited { status } => write!(f, "Server exited with status {status}"), LoadError::Other(msg) => write!(f, "{}", msg), } } @@ -630,11 +786,9 @@ impl AcpThread { title: impl Into, connection: Rc, project: Entity, + action_log: Entity, session_id: acp::SessionId, - cx: &mut Context, ) -> Self { - let action_log = cx.new(|_| ActionLog::new(project.clone())); - Self { action_log, shared_buffers: Default::default(), @@ -645,9 +799,14 @@ impl AcpThread { send_task: None, connection, session_id, + token_usage: None, } } + pub fn connection(&self) -> &Rc { + &self.connection + } + pub fn action_log(&self) -> &Entity { &self.action_log } @@ -680,17 +839,17 @@ impl AcpThread { } } + pub fn token_usage(&self) -> Option<&TokenUsage> { + self.token_usage.as_ref() + } + pub fn has_pending_edit_tool_calls(&self) -> bool { for entry in self.entries.iter().rev() { match entry { AgentThreadEntry::UserMessage(_) => return false, AgentThreadEntry::ToolCall( call @ ToolCall { - status: - ToolCallStatus::Allowed { - status: - acp::ToolCallStatus::InProgress | acp::ToolCallStatus::Pending, - }, + status: ToolCallStatus::InProgress | ToolCallStatus::Pending, .. 
}, ) if call.diffs().next().is_some() => { @@ -719,10 +878,10 @@ impl AcpThread { &mut self, update: acp::SessionUpdate, cx: &mut Context, - ) -> Result<()> { + ) -> Result<(), acp::Error> { match update { acp::SessionUpdate::UserMessageChunk { content } => { - self.push_user_content_block(content, cx); + self.push_user_content_block(None, content, cx); } acp::SessionUpdate::AgentMessageChunk { content } => { self.push_assistant_content_block(content, false, cx); @@ -731,7 +890,7 @@ impl AcpThread { self.push_assistant_content_block(content, true, cx); } acp::SessionUpdate::ToolCall(tool_call) => { - self.upsert_tool_call(tool_call, cx); + self.upsert_tool_call(tool_call, cx)?; } acp::SessionUpdate::ToolCallUpdate(tool_call_update) => { self.update_tool_call(tool_call_update, cx)?; @@ -743,18 +902,39 @@ impl AcpThread { Ok(()) } - pub fn push_user_content_block(&mut self, chunk: acp::ContentBlock, cx: &mut Context) { + pub fn push_user_content_block( + &mut self, + message_id: Option, + chunk: acp::ContentBlock, + cx: &mut Context, + ) { let language_registry = self.project.read(cx).languages().clone(); let entries_len = self.entries.len(); if let Some(last_entry) = self.entries.last_mut() - && let AgentThreadEntry::UserMessage(UserMessage { content }) = last_entry + && let AgentThreadEntry::UserMessage(UserMessage { + id, + content, + chunks, + .. + }) = last_entry { - content.append(chunk, &language_registry, cx); - cx.emit(AcpThreadEvent::EntryUpdated(entries_len - 1)); + *id = message_id.or(id.take()); + content.append(chunk.clone(), &language_registry, cx); + chunks.push(chunk); + let idx = entries_len - 1; + cx.emit(AcpThreadEvent::EntryUpdated(idx)); } else { - let content = ContentBlock::new(chunk, &language_registry, cx); - self.push_entry(AgentThreadEntry::UserMessage(UserMessage { content }), cx); + let content = ContentBlock::new(chunk.clone(), &language_registry, cx); + self.push_entry( + AgentThreadEntry::UserMessage(UserMessage { + id: message_id, + content, + chunks: vec![chunk], + checkpoint: None, + }), + cx, + ); } } @@ -769,7 +949,8 @@ impl AcpThread { if let Some(last_entry) = self.entries.last_mut() && let AgentThreadEntry::AssistantMessage(AssistantMessage { chunks }) = last_entry { - cx.emit(AcpThreadEvent::EntryUpdated(entries_len - 1)); + let idx = entries_len - 1; + cx.emit(AcpThreadEvent::EntryUpdated(idx)); match (chunks.last_mut(), is_thought) { (Some(AssistantMessageChunk::Message { block }), false) | (Some(AssistantMessageChunk::Thought { block }), true) => { @@ -806,17 +987,53 @@ impl AcpThread { cx.emit(AcpThreadEvent::NewEntry); } + pub fn update_title(&mut self, title: SharedString, cx: &mut Context) -> Result<()> { + self.title = title; + cx.emit(AcpThreadEvent::TitleUpdated); + Ok(()) + } + + pub fn update_token_usage(&mut self, usage: Option, cx: &mut Context) { + self.token_usage = usage; + cx.emit(AcpThreadEvent::TokenUsageUpdated); + } + + pub fn update_retry_status(&mut self, status: RetryStatus, cx: &mut Context) { + cx.emit(AcpThreadEvent::Retry(status)); + } + pub fn update_tool_call( &mut self, - update: acp::ToolCallUpdate, + update: impl Into, cx: &mut Context, ) -> Result<()> { + let update = update.into(); let languages = self.project.read(cx).languages().clone(); let (ix, current_call) = self - .tool_call_mut(&update.id) + .tool_call_mut(update.id()) .context("Tool call not found")?; - current_call.update(update.fields, languages, cx); + match update { + ToolCallUpdate::UpdateFields(update) => { + let location_updated = 
update.fields.locations.is_some(); + current_call.update_fields(update.fields, languages, cx); + if location_updated { + self.resolve_locations(update.id, cx); + } + } + ToolCallUpdate::UpdateDiff(update) => { + current_call.content.clear(); + current_call + .content + .push(ToolCallContent::Diff(update.diff)); + } + ToolCallUpdate::UpdateTerminal(update) => { + current_call.content.clear(); + current_call + .content + .push(ToolCallContent::Terminal(update.terminal)); + } + } cx.emit(AcpThreadEvent::EntryUpdated(ix)); @@ -824,35 +1041,38 @@ impl AcpThread { } /// Updates a tool call if id matches an existing entry, otherwise inserts a new one. - pub fn upsert_tool_call(&mut self, tool_call: acp::ToolCall, cx: &mut Context) { - let status = ToolCallStatus::Allowed { - status: tool_call.status, - }; - self.upsert_tool_call_inner(tool_call, status, cx) + pub fn upsert_tool_call( + &mut self, + tool_call: acp::ToolCall, + cx: &mut Context, + ) -> Result<(), acp::Error> { + let status = tool_call.status.into(); + self.upsert_tool_call_inner(tool_call.into(), status, cx) } + /// Fails if id does not match an existing entry. pub fn upsert_tool_call_inner( &mut self, - tool_call: acp::ToolCall, + tool_call_update: acp::ToolCallUpdate, status: ToolCallStatus, cx: &mut Context, - ) { + ) -> Result<(), acp::Error> { let language_registry = self.project.read(cx).languages().clone(); - let call = ToolCall::from_acp(tool_call, status, language_registry, cx); - - let location = call.locations.last().cloned(); + let id = tool_call_update.id.clone(); - if let Some((ix, current_call)) = self.tool_call_mut(&call.id) { - *current_call = call; + if let Some((ix, current_call)) = self.tool_call_mut(&id) { + current_call.update_fields(tool_call_update.fields, language_registry, cx); + current_call.status = status; cx.emit(AcpThreadEvent::EntryUpdated(ix)); } else { + let call = + ToolCall::from_acp(tool_call_update.try_into()?, status, language_registry, cx); self.push_entry(AgentThreadEntry::ToolCall(call), cx); - } + }; - if let Some(location) = location { - self.set_project_location(location, cx) - } + self.resolve_locations(id, cx); + Ok(()) } fn tool_call_mut(&mut self, id: &acp::ToolCallId) -> Option<(usize, &mut ToolCall)> { @@ -873,43 +1093,74 @@ impl AcpThread { }) } - pub fn set_project_location(&self, location: acp::ToolCallLocation, cx: &mut Context) { - self.project.update(cx, |project, cx| { - let Some(path) = project.project_path_for_absolute_path(&location.path, cx) else { - return; - }; - let buffer = project.open_buffer(path, cx); - cx.spawn(async move |project, cx| { - let buffer = buffer.await?; - - project.update(cx, |project, cx| { - let position = if let Some(line) = location.line { - let snapshot = buffer.read(cx).snapshot(); - let point = snapshot.clip_point(Point::new(line, 0), Bias::Left); - snapshot.anchor_before(point) - } else { - Anchor::MIN - }; + pub fn tool_call(&mut self, id: &acp::ToolCallId) -> Option<(usize, &ToolCall)> { + self.entries + .iter() + .enumerate() + .rev() + .find_map(|(index, tool_call)| { + if let AgentThreadEntry::ToolCall(tool_call) = tool_call + && &tool_call.id == id + { + Some((index, tool_call)) + } else { + None + } + }) + } - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position, - }), - cx, - ); - }) + pub fn resolve_locations(&mut self, id: acp::ToolCallId, cx: &mut Context) { + let project = self.project.clone(); + let Some((_, tool_call)) = self.tool_call_mut(&id) else { + return; + }; + let task = 
tool_call.resolve_locations(project, cx); + cx.spawn(async move |this, cx| { + let resolved_locations = task.await; + this.update(cx, |this, cx| { + let project = this.project.clone(); + let Some((ix, tool_call)) = this.tool_call_mut(&id) else { + return; + }; + if let Some(Some(location)) = resolved_locations.last() { + project.update(cx, |project, cx| { + if let Some(agent_location) = project.agent_location() { + let should_ignore = agent_location.buffer == location.buffer + && location + .buffer + .update(cx, |buffer, _| { + let snapshot = buffer.snapshot(); + let old_position = + agent_location.position.to_point(&snapshot); + let new_position = location.position.to_point(&snapshot); + // ignore this so that when we get updates from the edit tool + // the position doesn't reset to the startof line + old_position.row == new_position.row + && old_position.column > new_position.column + }) + .ok() + .unwrap_or_default(); + if !should_ignore { + project.set_agent_location(Some(location.clone()), cx); + } + } + }); + } + if tool_call.resolved_locations != resolved_locations { + tool_call.resolved_locations = resolved_locations; + cx.emit(AcpThreadEvent::EntryUpdated(ix)); + } }) - .detach_and_log_err(cx); - }); + }) + .detach(); } pub fn request_tool_call_authorization( &mut self, - tool_call: acp::ToolCall, + tool_call: acp::ToolCallUpdate, options: Vec, cx: &mut Context, - ) -> oneshot::Receiver { + ) -> Result, acp::Error> { let (tx, rx) = oneshot::channel(); let status = ToolCallStatus::WaitingForConfirmation { @@ -917,9 +1168,9 @@ impl AcpThread { respond_tx: tx, }; - self.upsert_tool_call_inner(tool_call, status, cx); + self.upsert_tool_call_inner(tool_call, status, cx)?; cx.emit(AcpThreadEvent::ToolAuthorizationRequired); - rx + Ok(rx) } pub fn authorize_tool_call( @@ -938,9 +1189,7 @@ impl AcpThread { ToolCallStatus::Rejected } acp::PermissionOptionKind::AllowOnce | acp::PermissionOptionKind::AllowAlways => { - ToolCallStatus::Allowed { - status: acp::ToolCallStatus::InProgress, - } + ToolCallStatus::InProgress } }; @@ -961,7 +1210,10 @@ impl AcpThread { match &entry { AgentThreadEntry::ToolCall(call) => match call.status { ToolCallStatus::WaitingForConfirmation { .. } => return true, - ToolCallStatus::Allowed { .. } + ToolCallStatus::Pending + | ToolCallStatus::InProgress + | ToolCallStatus::Completed + | ToolCallStatus::Failed | ToolCallStatus::Rejected | ToolCallStatus::Canceled => continue, }, @@ -1035,66 +1287,129 @@ impl AcpThread { self.project.read(cx).languages().clone(), cx, ); - self.push_entry( - AgentThreadEntry::UserMessage(UserMessage { content: block }), - cx, - ); - self.clear_completed_plan_entries(cx); + let request = acp::PromptRequest { + prompt: message.clone(), + session_id: self.session_id.clone(), + }; + let git_store = self.project.read(cx).git_store().clone(); - let (tx, rx) = oneshot::channel(); - let cancel_task = self.cancel(cx); + let message_id = if self + .connection + .session_editor(&self.session_id, cx) + .is_some() + { + Some(UserMessageId::new()) + } else { + None + }; - self.send_task = Some(cx.spawn(async move |this, cx| { - async { - cancel_task.await; - - let result = this - .update(cx, |this, cx| { - this.connection.prompt( - acp::PromptRequest { - prompt: message, - session_id: this.session_id.clone(), - }, - cx, - ) - })? 
- .await; + self.run_turn(cx, async move |this, cx| { + this.update(cx, |this, cx| { + this.push_entry( + AgentThreadEntry::UserMessage(UserMessage { + id: message_id.clone(), + content: block, + chunks: message, + checkpoint: None, + }), + cx, + ); + }) + .ok(); - tx.send(result).log_err(); + let old_checkpoint = git_store + .update(cx, |git, cx| git.checkpoint(cx))? + .await + .context("failed to get old checkpoint") + .log_err(); + this.update(cx, |this, cx| { + if let Some((_ix, message)) = this.last_user_message() { + message.checkpoint = old_checkpoint.map(|git_checkpoint| Checkpoint { + git_checkpoint, + show: false, + }); + } + this.connection.prompt(message_id, request, cx) + })? + .await + }) + } - anyhow::Ok(()) - } + pub fn resume(&mut self, cx: &mut Context) -> BoxFuture<'static, Result<()>> { + self.run_turn(cx, async move |this, cx| { + this.update(cx, |this, cx| { + this.connection + .resume(&this.session_id, cx) + .map(|resume| resume.run(cx)) + })? + .context("resuming a session is not supported")? .await - .log_err(); + }) + } + + fn run_turn( + &mut self, + cx: &mut Context, + f: impl 'static + AsyncFnOnce(WeakEntity, &mut AsyncApp) -> Result, + ) -> BoxFuture<'static, Result<()>> { + self.clear_completed_plan_entries(cx); + + let (tx, rx) = oneshot::channel(); + let cancel_task = self.cancel(cx); + + self.send_task = Some(cx.spawn(async move |this, cx| { + cancel_task.await; + tx.send(f(this, cx).await).ok(); })); - cx.spawn(async move |this, cx| match rx.await { - Ok(Err(e)) => { - this.update(cx, |_, cx| cx.emit(AcpThreadEvent::Error)) - .log_err(); - Err(e)? - } - result => { - let cancelled = matches!( - result, - Ok(Ok(acp::PromptResponse { - stop_reason: acp::StopReason::Cancelled - })) - ); + cx.spawn(async move |this, cx| { + let response = rx.await; + + this.update(cx, |this, cx| this.update_last_checkpoint(cx))? + .await?; + + this.update(cx, |this, cx| { + this.project + .update(cx, |project, cx| project.set_agent_location(None, cx)); + match response { + Ok(Err(e)) => { + this.send_task.take(); + cx.emit(AcpThreadEvent::Error); + Err(e) + } + result => { + let canceled = matches!( + result, + Ok(Ok(acp::PromptResponse { + stop_reason: acp::StopReason::Cancelled + })) + ); - // We only take the task if the current prompt wasn't cancelled. - // - // This prompt may have been cancelled because another one was sent - // while it was still generating. In these cases, dropping `send_task` - // would cause the next generation to be cancelled. - if !cancelled { - this.update(cx, |this, _cx| this.send_task.take()).ok(); - } + // We only take the task if the current prompt wasn't canceled. + // + // This prompt may have been canceled because another one was sent + // while it was still generating. In these cases, dropping `send_task` + // would cause the next generation to be canceled. + if !canceled { + this.send_task.take(); + } - this.update(cx, |_, cx| cx.emit(AcpThreadEvent::Stopped)) - .log_err(); - Ok(()) - } + // Truncate entries if the last prompt was refused. + if let Ok(Ok(acp::PromptResponse { + stop_reason: acp::StopReason::Refusal, + })) = result + && let Some((ix, _)) = this.last_user_message() + { + let range = ix..this.entries.len(); + this.entries.truncate(ix); + cx.emit(AcpThreadEvent::EntriesRemoved(range)); + } + + cx.emit(AcpThreadEvent::Stopped); + Ok(()) + } + } + })? 
}) .boxed() } @@ -1108,10 +1423,9 @@ impl AcpThread { if let AgentThreadEntry::ToolCall(call) = entry { let cancel = matches!( call.status, - ToolCallStatus::WaitingForConfirmation { .. } - | ToolCallStatus::Allowed { - status: acp::ToolCallStatus::InProgress - } + ToolCallStatus::Pending + | ToolCallStatus::WaitingForConfirmation { .. } + | ToolCallStatus::InProgress ); if cancel { @@ -1126,6 +1440,122 @@ impl AcpThread { cx.foreground_executor().spawn(send_task) } + /// Rewinds this thread to before the entry at `index`, removing it and all + /// subsequent entries while reverting any changes made from that point. + pub fn rewind(&mut self, id: UserMessageId, cx: &mut Context) -> Task> { + let Some(session_editor) = self.connection.session_editor(&self.session_id, cx) else { + return Task::ready(Err(anyhow!("not supported"))); + }; + let Some(message) = self.user_message(&id) else { + return Task::ready(Err(anyhow!("message not found"))); + }; + + let checkpoint = message + .checkpoint + .as_ref() + .map(|c| c.git_checkpoint.clone()); + + let git_store = self.project.read(cx).git_store().clone(); + cx.spawn(async move |this, cx| { + if let Some(checkpoint) = checkpoint { + git_store + .update(cx, |git, cx| git.restore_checkpoint(checkpoint, cx))? + .await?; + } + + cx.update(|cx| session_editor.truncate(id.clone(), cx))? + .await?; + this.update(cx, |this, cx| { + if let Some((ix, _)) = this.user_message_mut(&id) { + let range = ix..this.entries.len(); + this.entries.truncate(ix); + cx.emit(AcpThreadEvent::EntriesRemoved(range)); + } + }) + }) + } + + fn update_last_checkpoint(&mut self, cx: &mut Context) -> Task> { + let git_store = self.project.read(cx).git_store().clone(); + + let old_checkpoint = if let Some((_, message)) = self.last_user_message() { + if let Some(checkpoint) = message.checkpoint.as_ref() { + checkpoint.git_checkpoint.clone() + } else { + return Task::ready(Ok(())); + } + } else { + return Task::ready(Ok(())); + }; + + let new_checkpoint = git_store.update(cx, |git, cx| git.checkpoint(cx)); + cx.spawn(async move |this, cx| { + let new_checkpoint = new_checkpoint + .await + .context("failed to get new checkpoint") + .log_err(); + if let Some(new_checkpoint) = new_checkpoint { + let equal = git_store + .update(cx, |git, cx| { + git.compare_checkpoints(old_checkpoint.clone(), new_checkpoint, cx) + })? 
+ .await + .unwrap_or(true); + this.update(cx, |this, cx| { + let (ix, message) = this.last_user_message().context("no user message")?; + let checkpoint = message.checkpoint.as_mut().context("no checkpoint")?; + checkpoint.show = !equal; + cx.emit(AcpThreadEvent::EntryUpdated(ix)); + anyhow::Ok(()) + })??; + } + + Ok(()) + }) + } + + fn last_user_message(&mut self) -> Option<(usize, &mut UserMessage)> { + self.entries + .iter_mut() + .enumerate() + .rev() + .find_map(|(ix, entry)| { + if let AgentThreadEntry::UserMessage(message) = entry { + Some((ix, message)) + } else { + None + } + }) + } + + fn user_message(&self, id: &UserMessageId) -> Option<&UserMessage> { + self.entries.iter().find_map(|entry| { + if let AgentThreadEntry::UserMessage(message) = entry { + if message.id.as_ref() == Some(id) { + Some(message) + } else { + None + } + } else { + None + } + }) + } + + fn user_message_mut(&mut self, id: &UserMessageId) -> Option<(usize, &mut UserMessage)> { + self.entries.iter_mut().enumerate().find_map(|(ix, entry)| { + if let AgentThreadEntry::UserMessage(message) = entry { + if message.id.as_ref() == Some(id) { + Some((ix, message)) + } else { + None + } + } else { + None + } + }) + } + pub fn read_text_file( &self, path: PathBuf, @@ -1240,30 +1670,59 @@ impl AcpThread { .collect::>() }) .await; - cx.update(|cx| { - project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: edits - .last() - .map(|(range, _)| range.end) - .unwrap_or(Anchor::MIN), - }), - cx, - ); - }); + project.update(cx, |project, cx| { + project.set_agent_location( + Some(AgentLocation { + buffer: buffer.downgrade(), + position: edits + .last() + .map(|(range, _)| range.end) + .unwrap_or(Anchor::MIN), + }), + cx, + ); + })?; + + let format_on_save = cx.update(|cx| { action_log.update(cx, |action_log, cx| { action_log.buffer_read(buffer.clone(), cx); }); - buffer.update(cx, |buffer, cx| { + + let format_on_save = buffer.update(cx, |buffer, cx| { buffer.edit(edits, None, cx); + + let settings = language::language_settings::language_settings( + buffer.language().map(|l| l.name()), + buffer.file(), + cx, + ); + + settings.format_on_save != FormatOnSave::Off }); action_log.update(cx, |action_log, cx| { action_log.buffer_edited(buffer.clone(), cx); }); + format_on_save })?; + + if format_on_save { + let format_task = project.update(cx, |project, cx| { + project.format( + HashSet::from_iter([buffer.clone()]), + LspFormatTarget::Buffers, + false, + FormatTrigger::Save, + cx, + ) + })?; + format_task.await.log_err(); + + action_log.update(cx, |action_log, cx| { + action_log.buffer_edited(buffer.clone(), cx); + })?; + } + project .update(cx, |project, cx| project.save_buffer(buffer, cx))? 
.await @@ -1274,8 +1733,50 @@ impl AcpThread { self.entries.iter().map(|e| e.to_markdown(cx)).collect() } - pub fn emit_server_exited(&mut self, status: ExitStatus, cx: &mut Context) { - cx.emit(AcpThreadEvent::ServerExited(status)); + pub fn emit_load_error(&mut self, error: LoadError, cx: &mut Context) { + cx.emit(AcpThreadEvent::LoadError(error)); + } +} + +fn markdown_for_raw_output( + raw_output: &serde_json::Value, + language_registry: &Arc, + cx: &mut App, +) -> Option> { + match raw_output { + serde_json::Value::Null => None, + serde_json::Value::Bool(value) => Some(cx.new(|cx| { + Markdown::new( + value.to_string().into(), + Some(language_registry.clone()), + None, + cx, + ) + })), + serde_json::Value::Number(value) => Some(cx.new(|cx| { + Markdown::new( + value.to_string().into(), + Some(language_registry.clone()), + None, + cx, + ) + })), + serde_json::Value::String(value) => Some(cx.new(|cx| { + Markdown::new( + value.clone().into(), + Some(language_registry.clone()), + None, + cx, + ) + })), + value => Some(cx.new(|cx| { + Markdown::new( + format!("```json\n{}\n```", value).into(), + Some(language_registry.clone()), + None, + cx, + ) + })), } } @@ -1284,15 +1785,21 @@ mod tests { use super::*; use anyhow::anyhow; use futures::{channel::mpsc, future::LocalBoxFuture, select}; - use gpui::{AsyncApp, TestAppContext, WeakEntity}; + use gpui::{App, AsyncApp, TestAppContext, WeakEntity}; use indoc::indoc; - use project::FakeFs; + use project::{FakeFs, Fs}; use rand::Rng as _; use serde_json::json; use settings::SettingsStore; use smol::stream::StreamExt as _; - use std::{cell::RefCell, rc::Rc, time::Duration}; - + use std::{ + any::Any, + cell::RefCell, + path::Path, + rc::Rc, + sync::atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst}, + time::Duration, + }; use util::path; fn init_test(cx: &mut TestAppContext) { @@ -1313,17 +1820,14 @@ mod tests { let project = Project::test(fs, [], cx).await; let connection = Rc::new(FakeAgentConnection::new()); let thread = cx - .spawn(async move |mut cx| { - connection - .new_thread(project, Path::new(path!("/test")), &mut cx) - .await - }) + .update(|cx| connection.new_thread(project, Path::new(path!("/test")), cx)) .await .unwrap(); // Test creating a new user message thread.update(cx, |thread, cx| { thread.push_user_content_block( + None, acp::ContentBlock::Text(acp::TextContent { annotations: None, text: "Hello, ".to_string(), @@ -1335,6 +1839,7 @@ mod tests { thread.update(cx, |thread, cx| { assert_eq!(thread.entries.len(), 1); if let AgentThreadEntry::UserMessage(user_msg) = &thread.entries[0] { + assert_eq!(user_msg.id, None); assert_eq!(user_msg.content.to_markdown(cx), "Hello, "); } else { panic!("Expected UserMessage"); @@ -1342,8 +1847,10 @@ mod tests { }); // Test appending to existing user message + let message_1_id = UserMessageId::new(); thread.update(cx, |thread, cx| { thread.push_user_content_block( + Some(message_1_id.clone()), acp::ContentBlock::Text(acp::TextContent { annotations: None, text: "world!".to_string(), @@ -1355,6 +1862,7 @@ mod tests { thread.update(cx, |thread, cx| { assert_eq!(thread.entries.len(), 1); if let AgentThreadEntry::UserMessage(user_msg) = &thread.entries[0] { + assert_eq!(user_msg.id, Some(message_1_id)); assert_eq!(user_msg.content.to_markdown(cx), "Hello, world!"); } else { panic!("Expected UserMessage"); @@ -1373,8 +1881,10 @@ mod tests { ); }); + let message_2_id = UserMessageId::new(); thread.update(cx, |thread, cx| { thread.push_user_content_block( + Some(message_2_id.clone()), 
acp::ContentBlock::Text(acp::TextContent { annotations: None, text: "New user message".to_string(), @@ -1386,6 +1896,7 @@ mod tests { thread.update(cx, |thread, cx| { assert_eq!(thread.entries.len(), 3); if let AgentThreadEntry::UserMessage(user_msg) = &thread.entries[2] { + assert_eq!(user_msg.id, Some(message_2_id)); assert_eq!(user_msg.content.to_markdown(cx), "New user message"); } else { panic!("Expected UserMessage at index 2"); @@ -1429,11 +1940,7 @@ mod tests { )); let thread = cx - .spawn(async move |mut cx| { - connection - .new_thread(project, Path::new(path!("/test")), &mut cx) - .await - }) + .update(|cx| connection.new_thread(project, Path::new(path!("/test")), cx)) .await .unwrap(); @@ -1516,7 +2023,7 @@ mod tests { .unwrap(); let thread = cx - .spawn(|mut cx| connection.new_thread(project, Path::new(path!("/tmp")), &mut cx)) + .update(|cx| connection.new_thread(project, Path::new(path!("/tmp")), cx)) .await .unwrap(); @@ -1579,11 +2086,7 @@ mod tests { })); let thread = cx - .spawn(async move |mut cx| { - connection - .new_thread(project, Path::new(path!("/test")), &mut cx) - .await - }) + .update(|cx| connection.new_thread(project, Path::new(path!("/test")), cx)) .await .unwrap(); @@ -1597,10 +2100,7 @@ mod tests { assert!(matches!( thread.entries[1], AgentThreadEntry::ToolCall(ToolCall { - status: ToolCallStatus::Allowed { - status: acp::ToolCallStatus::InProgress, - .. - }, + status: ToolCallStatus::InProgress, .. }) )); @@ -1639,10 +2139,7 @@ mod tests { assert!(matches!( thread.entries[1], AgentThreadEntry::ToolCall(ToolCall { - status: ToolCallStatus::Allowed { - status: acp::ToolCallStatus::Completed, - .. - }, + status: ToolCallStatus::Completed, .. }) )); @@ -1691,10 +2188,11 @@ mod tests { } })); - let thread = connection - .new_thread(project, Path::new(path!("/test")), &mut cx.to_async()) + let thread = cx + .update(|cx| connection.new_thread(project, Path::new(path!("/test")), cx)) .await .unwrap(); + cx.update(|cx| thread.update(cx, |thread, cx| thread.send(vec!["Hi".into()], cx))) .await .unwrap(); @@ -1702,6 +2200,272 @@ mod tests { assert!(cx.read(|cx| !thread.read(cx).has_pending_edit_tool_calls())); } + #[gpui::test(iterations = 10)] + async fn test_checkpoints(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/test"), + json!({ + ".git": {} + }), + ) + .await; + let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await; + + let simulate_changes = Arc::new(AtomicBool::new(true)); + let next_filename = Arc::new(AtomicUsize::new(0)); + let connection = Rc::new(FakeAgentConnection::new().on_user_message({ + let simulate_changes = simulate_changes.clone(); + let next_filename = next_filename.clone(); + let fs = fs.clone(); + move |request, thread, mut cx| { + let fs = fs.clone(); + let simulate_changes = simulate_changes.clone(); + let next_filename = next_filename.clone(); + async move { + if simulate_changes.load(SeqCst) { + let filename = format!("/test/file-{}", next_filename.fetch_add(1, SeqCst)); + fs.write(Path::new(&filename), b"").await?; + } + + let acp::ContentBlock::Text(content) = &request.prompt[0] else { + panic!("expected text content block"); + }; + thread.update(&mut cx, |thread, cx| { + thread + .handle_session_update( + acp::SessionUpdate::AgentMessageChunk { + content: content.text.to_uppercase().into(), + }, + cx, + ) + .unwrap(); + })?; + Ok(acp::PromptResponse { + stop_reason: acp::StopReason::EndTurn, + }) + } + .boxed_local() + } + })); + 
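+        // The stub connection above writes one new file per prompt while `simulate_changes` is
+        // set and echoes each prompt back upper-cased, so the assertions below can observe when
+        // checkpoints are recorded and restored.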
let thread = cx + .update(|cx| connection.new_thread(project, Path::new(path!("/test")), cx)) + .await + .unwrap(); + + cx.update(|cx| thread.update(cx, |thread, cx| thread.send(vec!["Lorem".into()], cx))) + .await + .unwrap(); + thread.read_with(cx, |thread, cx| { + assert_eq!( + thread.to_markdown(cx), + indoc! {" + ## User (checkpoint) + + Lorem + + ## Assistant + + LOREM + + "} + ); + }); + assert_eq!(fs.files(), vec![Path::new(path!("/test/file-0"))]); + + cx.update(|cx| thread.update(cx, |thread, cx| thread.send(vec!["ipsum".into()], cx))) + .await + .unwrap(); + thread.read_with(cx, |thread, cx| { + assert_eq!( + thread.to_markdown(cx), + indoc! {" + ## User (checkpoint) + + Lorem + + ## Assistant + + LOREM + + ## User (checkpoint) + + ipsum + + ## Assistant + + IPSUM + + "} + ); + }); + assert_eq!( + fs.files(), + vec![ + Path::new(path!("/test/file-0")), + Path::new(path!("/test/file-1")) + ] + ); + + // Checkpoint isn't stored when there are no changes. + simulate_changes.store(false, SeqCst); + cx.update(|cx| thread.update(cx, |thread, cx| thread.send(vec!["dolor".into()], cx))) + .await + .unwrap(); + thread.read_with(cx, |thread, cx| { + assert_eq!( + thread.to_markdown(cx), + indoc! {" + ## User (checkpoint) + + Lorem + + ## Assistant + + LOREM + + ## User (checkpoint) + + ipsum + + ## Assistant + + IPSUM + + ## User + + dolor + + ## Assistant + + DOLOR + + "} + ); + }); + assert_eq!( + fs.files(), + vec![ + Path::new(path!("/test/file-0")), + Path::new(path!("/test/file-1")) + ] + ); + + // Rewinding the conversation truncates the history and restores the checkpoint. + thread + .update(cx, |thread, cx| { + let AgentThreadEntry::UserMessage(message) = &thread.entries[2] else { + panic!("unexpected entries {:?}", thread.entries) + }; + thread.rewind(message.id.clone().unwrap(), cx) + }) + .await + .unwrap(); + thread.read_with(cx, |thread, cx| { + assert_eq!( + thread.to_markdown(cx), + indoc! {" + ## User (checkpoint) + + Lorem + + ## Assistant + + LOREM + + "} + ); + }); + assert_eq!(fs.files(), vec![Path::new(path!("/test/file-0"))]); + } + + #[gpui::test] + async fn test_refusal(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree(path!("/"), json!({})).await; + let project = Project::test(fs.clone(), [path!("/").as_ref()], cx).await; + + let refuse_next = Arc::new(AtomicBool::new(false)); + let connection = Rc::new(FakeAgentConnection::new().on_user_message({ + let refuse_next = refuse_next.clone(); + move |request, thread, mut cx| { + let refuse_next = refuse_next.clone(); + async move { + if refuse_next.load(SeqCst) { + return Ok(acp::PromptResponse { + stop_reason: acp::StopReason::Refusal, + }); + } + + let acp::ContentBlock::Text(content) = &request.prompt[0] else { + panic!("expected text content block"); + }; + thread.update(&mut cx, |thread, cx| { + thread + .handle_session_update( + acp::SessionUpdate::AgentMessageChunk { + content: content.text.to_uppercase().into(), + }, + cx, + ) + .unwrap(); + })?; + Ok(acp::PromptResponse { + stop_reason: acp::StopReason::EndTurn, + }) + } + .boxed_local() + } + })); + let thread = cx + .update(|cx| connection.new_thread(project, Path::new(path!("/test")), cx)) + .await + .unwrap(); + + cx.update(|cx| thread.update(cx, |thread, cx| thread.send(vec!["hello".into()], cx))) + .await + .unwrap(); + thread.read_with(cx, |thread, cx| { + assert_eq!( + thread.to_markdown(cx), + indoc! 
{" + ## User + + hello + + ## Assistant + + HELLO + + "} + ); + }); + + // Simulate refusing the second message, ensuring the conversation gets + // truncated to before sending it. + refuse_next.store(true, SeqCst); + cx.update(|cx| thread.update(cx, |thread, cx| thread.send(vec!["world".into()], cx))) + .await + .unwrap(); + thread.read_with(cx, |thread, cx| { + assert_eq!( + thread.to_markdown(cx), + indoc! {" + ## User + + hello + + ## Assistant + + HELLO + + "} + ); + }); + } + async fn run_until_first_tool_call( thread: &Entity, cx: &mut TestAppContext, @@ -1783,7 +2547,7 @@ mod tests { self: Rc, project: Entity, _cwd: &Path, - cx: &mut gpui::AsyncApp, + cx: &mut App, ) -> Task>> { let session_id = acp::SessionId( rand::thread_rng() @@ -1793,9 +2557,16 @@ mod tests { .collect::() .into(), ); - let thread = cx - .new(|cx| AcpThread::new("Test", self.clone(), project, session_id.clone(), cx)) - .unwrap(); + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let thread = cx.new(|_cx| { + AcpThread::new( + "Test", + self.clone(), + project, + action_log, + session_id.clone(), + ) + }); self.sessions.lock().insert(session_id, thread.downgrade()); Task::ready(Ok(thread)) } @@ -1810,6 +2581,7 @@ mod tests { fn prompt( &self, + _id: Option, params: acp::PromptRequest, cx: &mut App, ) -> Task> { @@ -1826,9 +2598,17 @@ mod tests { } } + fn prompt_capabilities(&self) -> acp::PromptCapabilities { + acp::PromptCapabilities { + image: true, + audio: true, + embedded_context: true, + } + } + fn cancel(&self, session_id: &acp::SessionId, cx: &mut App) { let sessions = self.sessions.lock(); - let thread = sessions.get(&session_id).unwrap().clone(); + let thread = sessions.get(session_id).unwrap().clone(); cx.spawn(async move |cx| { thread @@ -1838,5 +2618,29 @@ mod tests { }) .detach(); } + + fn session_editor( + &self, + session_id: &acp::SessionId, + _cx: &mut App, + ) -> Option> { + Some(Rc::new(FakeAgentSessionEditor { + _session_id: session_id.clone(), + })) + } + + fn into_any(self: Rc) -> Rc { + self + } + } + + struct FakeAgentSessionEditor { + _session_id: acp::SessionId, + } + + impl AgentSessionEditor for FakeAgentSessionEditor { + fn truncate(&self, _message_id: UserMessageId, _cx: &mut App) -> Task> { + Task::ready(Ok(())) + } } } diff --git a/crates/acp_thread/src/connection.rs b/crates/acp_thread/src/connection.rs index cf06563beee4aa6bdc6ecf1fdb85178116ad74dd..2bbd36487392452dc9a179c01f759e9fb68a7384 100644 --- a/crates/acp_thread/src/connection.rs +++ b/crates/acp_thread/src/connection.rs @@ -1,18 +1,141 @@ -use std::{error::Error, fmt, path::Path, rc::Rc, sync::Arc}; - +use crate::AcpThread; use agent_client_protocol::{self as acp}; use anyhow::Result; -use gpui::{AsyncApp, Entity, Task}; -use language_model::LanguageModel; +use collections::IndexMap; +use gpui::{Entity, SharedString, Task}; +use language_model::LanguageModelProviderId; use project::Project; -use ui::App; +use serde::{Deserialize, Serialize}; +use std::{any::Any, error::Error, fmt, path::Path, rc::Rc, sync::Arc}; +use ui::{App, IconName}; +use uuid::Uuid; -use crate::AcpThread; +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] +pub struct UserMessageId(Arc); + +impl UserMessageId { + pub fn new() -> Self { + Self(Uuid::new_v4().to_string().into()) + } +} + +pub trait AgentConnection { + fn new_thread( + self: Rc, + project: Entity, + cwd: &Path, + cx: &mut App, + ) -> Task>>; + + fn auth_methods(&self) -> &[acp::AuthMethod]; + + fn authenticate(&self, method: acp::AuthMethodId, cx: 
&mut App) -> Task>; + + fn prompt( + &self, + user_message_id: Option, + params: acp::PromptRequest, + cx: &mut App, + ) -> Task>; + + fn prompt_capabilities(&self) -> acp::PromptCapabilities; + + fn resume( + &self, + _session_id: &acp::SessionId, + _cx: &mut App, + ) -> Option> { + None + } + + fn cancel(&self, session_id: &acp::SessionId, cx: &mut App); + + fn session_editor( + &self, + _session_id: &acp::SessionId, + _cx: &mut App, + ) -> Option> { + None + } + + /// Returns this agent as an [Rc] if the model selection capability is supported. + /// + /// If the agent does not support model selection, returns [None]. + /// This allows sharing the selector in UI components. + fn model_selector(&self) -> Option> { + None + } + + fn telemetry(&self) -> Option> { + None + } + + fn into_any(self: Rc) -> Rc; +} + +impl dyn AgentConnection { + pub fn downcast(self: Rc) -> Option> { + self.into_any().downcast().ok() + } +} + +pub trait AgentSessionEditor { + fn truncate(&self, message_id: UserMessageId, cx: &mut App) -> Task>; +} + +pub trait AgentSessionResume { + fn run(&self, cx: &mut App) -> Task>; +} + +pub trait AgentTelemetry { + /// The name of the agent used for telemetry. + fn agent_name(&self) -> String; + + /// A representation of the current thread state that can be serialized for + /// storage with telemetry events. + fn thread_data( + &self, + session_id: &acp::SessionId, + cx: &mut App, + ) -> Task>; +} + +#[derive(Debug)] +pub struct AuthRequired { + pub description: Option, + pub provider_id: Option, +} + +impl AuthRequired { + pub fn new() -> Self { + Self { + description: None, + provider_id: None, + } + } + + pub fn with_description(mut self, description: String) -> Self { + self.description = Some(description); + self + } + + pub fn with_language_model_provider(mut self, provider_id: LanguageModelProviderId) -> Self { + self.provider_id = Some(provider_id); + self + } +} + +impl Error for AuthRequired {} +impl fmt::Display for AuthRequired { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Authentication required") + } +} /// Trait for agents that support listing, selecting, and querying language models. /// /// This is an optional capability; agents indicate support via [AgentConnection::model_selector]. -pub trait ModelSelector: 'static { +pub trait AgentModelSelector: 'static { /// Lists all available language models for this agent. /// /// # Parameters @@ -20,7 +143,7 @@ pub trait ModelSelector: 'static { /// /// # Returns /// A task resolving to the list of models or an error (e.g., if no models are configured). - fn list_models(&self, cx: &mut AsyncApp) -> Task>>>; + fn list_models(&self, cx: &mut App) -> Task>; /// Selects a model for a specific session (thread). /// @@ -37,8 +160,8 @@ pub trait ModelSelector: 'static { fn select_model( &self, session_id: acp::SessionId, - model: Arc, - cx: &mut AsyncApp, + model_id: AgentModelId, + cx: &mut App, ) -> Task>; /// Retrieves the currently selected model for a specific session (thread). @@ -52,42 +175,276 @@ pub trait ModelSelector: 'static { fn selected_model( &self, session_id: &acp::SessionId, - cx: &mut AsyncApp, - ) -> Task>>; + cx: &mut App, + ) -> Task>; + + /// Whenever the model list is updated the receiver will be notified. 
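+    /// The receiver carries no payload; it only signals that the list returned by
+    /// [`Self::list_models`] may have changed.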
+ fn watch(&self, cx: &mut App) -> watch::Receiver<()>; } -pub trait AgentConnection { - fn new_thread( - self: Rc, - project: Entity, - cwd: &Path, - cx: &mut AsyncApp, - ) -> Task>>; +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct AgentModelId(pub SharedString); - fn auth_methods(&self) -> &[acp::AuthMethod]; +impl std::ops::Deref for AgentModelId { + type Target = SharedString; - fn authenticate(&self, method: acp::AuthMethodId, cx: &mut App) -> Task>; + fn deref(&self) -> &Self::Target { + &self.0 + } +} - fn prompt(&self, params: acp::PromptRequest, cx: &mut App) - -> Task>; +impl fmt::Display for AgentModelId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} - fn cancel(&self, session_id: &acp::SessionId, cx: &mut App); +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct AgentModelInfo { + pub id: AgentModelId, + pub name: SharedString, + pub icon: Option, +} - /// Returns this agent as an [Rc] if the model selection capability is supported. - /// - /// If the agent does not support model selection, returns [None]. - /// This allows sharing the selector in UI components. - fn model_selector(&self) -> Option> { - None // Default impl for agents that don't support it +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct AgentModelGroupName(pub SharedString); + +#[derive(Debug, Clone)] +pub enum AgentModelList { + Flat(Vec), + Grouped(IndexMap>), +} + +impl AgentModelList { + pub fn is_empty(&self) -> bool { + match self { + AgentModelList::Flat(models) => models.is_empty(), + AgentModelList::Grouped(groups) => groups.is_empty(), + } } } -#[derive(Debug)] -pub struct AuthRequired; +#[cfg(feature = "test-support")] +mod test_support { + use std::sync::Arc; -impl Error for AuthRequired {} -impl fmt::Display for AuthRequired { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "AuthRequired") + use action_log::ActionLog; + use collections::HashMap; + use futures::{channel::oneshot, future::try_join_all}; + use gpui::{AppContext as _, WeakEntity}; + use parking_lot::Mutex; + + use super::*; + + #[derive(Clone, Default)] + pub struct StubAgentConnection { + sessions: Arc>>, + permission_requests: HashMap>, + next_prompt_updates: Arc>>, + } + + struct Session { + thread: WeakEntity, + response_tx: Option>, + } + + impl StubAgentConnection { + pub fn new() -> Self { + Self { + next_prompt_updates: Default::default(), + permission_requests: HashMap::default(), + sessions: Arc::default(), + } + } + + pub fn set_next_prompt_updates(&self, updates: Vec) { + *self.next_prompt_updates.lock() = updates; + } + + pub fn with_permission_requests( + mut self, + permission_requests: HashMap>, + ) -> Self { + self.permission_requests = permission_requests; + self + } + + pub fn send_update( + &self, + session_id: acp::SessionId, + update: acp::SessionUpdate, + cx: &mut App, + ) { + assert!( + self.next_prompt_updates.lock().is_empty(), + "Use either send_update or set_next_prompt_updates" + ); + + self.sessions + .lock() + .get(&session_id) + .unwrap() + .thread + .update(cx, |thread, cx| { + thread.handle_session_update(update, cx).unwrap(); + }) + .unwrap(); + } + + pub fn end_turn(&self, session_id: acp::SessionId, stop_reason: acp::StopReason) { + self.sessions + .lock() + .get_mut(&session_id) + .unwrap() + .response_tx + .take() + .expect("No pending turn") + .send(stop_reason) + .unwrap(); + } + } + + impl AgentConnection for StubAgentConnection { + fn auth_methods(&self) -> &[acp::AuthMethod] { + &[] + } + + fn new_thread( + 
self: Rc, + project: Entity, + _cwd: &Path, + cx: &mut gpui::App, + ) -> Task>> { + let session_id = acp::SessionId(self.sessions.lock().len().to_string().into()); + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let thread = cx.new(|_cx| { + AcpThread::new( + "Test", + self.clone(), + project, + action_log, + session_id.clone(), + ) + }); + self.sessions.lock().insert( + session_id, + Session { + thread: thread.downgrade(), + response_tx: None, + }, + ); + Task::ready(Ok(thread)) + } + + fn prompt_capabilities(&self) -> acp::PromptCapabilities { + acp::PromptCapabilities { + image: true, + audio: true, + embedded_context: true, + } + } + + fn authenticate( + &self, + _method_id: acp::AuthMethodId, + _cx: &mut App, + ) -> Task> { + unimplemented!() + } + + fn prompt( + &self, + _id: Option, + params: acp::PromptRequest, + cx: &mut App, + ) -> Task> { + let mut sessions = self.sessions.lock(); + let Session { + thread, + response_tx, + } = sessions.get_mut(¶ms.session_id).unwrap(); + let mut tasks = vec![]; + if self.next_prompt_updates.lock().is_empty() { + let (tx, rx) = oneshot::channel(); + response_tx.replace(tx); + cx.spawn(async move |_| { + let stop_reason = rx.await?; + Ok(acp::PromptResponse { stop_reason }) + }) + } else { + for update in self.next_prompt_updates.lock().drain(..) { + let thread = thread.clone(); + let update = update.clone(); + let permission_request = if let acp::SessionUpdate::ToolCall(tool_call) = + &update + && let Some(options) = self.permission_requests.get(&tool_call.id) + { + Some((tool_call.clone(), options.clone())) + } else { + None + }; + let task = cx.spawn(async move |cx| { + if let Some((tool_call, options)) = permission_request { + let permission = thread.update(cx, |thread, cx| { + thread.request_tool_call_authorization( + tool_call.clone().into(), + options.clone(), + cx, + ) + })?; + permission?.await?; + } + thread.update(cx, |thread, cx| { + thread.handle_session_update(update.clone(), cx).unwrap(); + })?; + anyhow::Ok(()) + }); + tasks.push(task); + } + + cx.spawn(async move |_| { + try_join_all(tasks).await?; + Ok(acp::PromptResponse { + stop_reason: acp::StopReason::EndTurn, + }) + }) + } + } + + fn cancel(&self, session_id: &acp::SessionId, _cx: &mut App) { + if let Some(end_turn_tx) = self + .sessions + .lock() + .get_mut(session_id) + .unwrap() + .response_tx + .take() + { + end_turn_tx.send(acp::StopReason::Cancelled).unwrap(); + } + } + + fn session_editor( + &self, + _session_id: &agent_client_protocol::SessionId, + _cx: &mut App, + ) -> Option> { + Some(Rc::new(StubAgentSessionEditor)) + } + + fn into_any(self: Rc) -> Rc { + self + } + } + + struct StubAgentSessionEditor; + + impl AgentSessionEditor for StubAgentSessionEditor { + fn truncate(&self, _: UserMessageId, _: &mut App) -> Task> { + Task::ready(Ok(())) + } } } + +#[cfg(feature = "test-support")] +pub use test_support::*; diff --git a/crates/acp_thread/src/diff.rs b/crates/acp_thread/src/diff.rs new file mode 100644 index 0000000000000000000000000000000000000000..130bc3ab6bced76320c80e49aef5bdb555c54e7e --- /dev/null +++ b/crates/acp_thread/src/diff.rs @@ -0,0 +1,385 @@ +use anyhow::Result; +use buffer_diff::{BufferDiff, BufferDiffSnapshot}; +use editor::{MultiBuffer, PathKey}; +use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task}; +use itertools::Itertools; +use language::{ + Anchor, Buffer, Capability, LanguageRegistry, OffsetRangeExt as _, Point, Rope, TextBuffer, +}; +use std::{ + cmp::Reverse, + ops::Range, + path::{Path, PathBuf}, 
+ sync::Arc, +}; +use util::ResultExt; + +pub enum Diff { + Pending(PendingDiff), + Finalized(FinalizedDiff), +} + +impl Diff { + pub fn finalized( + path: PathBuf, + old_text: Option, + new_text: String, + language_registry: Arc, + cx: &mut Context, + ) -> Self { + let multibuffer = cx.new(|_cx| MultiBuffer::without_headers(Capability::ReadOnly)); + let buffer = cx.new(|cx| Buffer::local(new_text, cx)); + let task = cx.spawn({ + let multibuffer = multibuffer.clone(); + let path = path.clone(); + async move |_, cx| { + let language = language_registry + .language_for_file_path(&path) + .await + .log_err(); + + buffer.update(cx, |buffer, cx| buffer.set_language(language.clone(), cx))?; + + let diff = build_buffer_diff( + old_text.unwrap_or("".into()).into(), + &buffer, + Some(language_registry.clone()), + cx, + ) + .await?; + + multibuffer + .update(cx, |multibuffer, cx| { + let hunk_ranges = { + let buffer = buffer.read(cx); + let diff = diff.read(cx); + diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer, cx) + .map(|diff_hunk| diff_hunk.buffer_range.to_point(buffer)) + .collect::>() + }; + + multibuffer.set_excerpts_for_path( + PathKey::for_buffer(&buffer, cx), + buffer.clone(), + hunk_ranges, + editor::DEFAULT_MULTIBUFFER_CONTEXT, + cx, + ); + multibuffer.add_diff(diff, cx); + }) + .log_err(); + + anyhow::Ok(()) + } + }); + + Self::Finalized(FinalizedDiff { + multibuffer, + path, + _update_diff: task, + }) + } + + pub fn new(buffer: Entity, cx: &mut Context) -> Self { + let buffer_snapshot = buffer.read(cx).snapshot(); + let base_text = buffer_snapshot.text(); + let language_registry = buffer.read(cx).language_registry(); + let text_snapshot = buffer.read(cx).text_snapshot(); + let buffer_diff = cx.new(|cx| { + let mut diff = BufferDiff::new(&text_snapshot, cx); + let _ = diff.set_base_text( + buffer_snapshot.clone(), + language_registry, + text_snapshot, + cx, + ); + let snapshot = diff.snapshot(cx); + + let secondary_diff = cx.new(|cx| { + let mut diff = BufferDiff::new(&buffer_snapshot, cx); + diff.set_snapshot(snapshot, &buffer_snapshot, cx); + diff + }); + diff.set_secondary_diff(secondary_diff); + + diff + }); + + let multibuffer = cx.new(|cx| { + let mut multibuffer = MultiBuffer::without_headers(Capability::ReadOnly); + multibuffer.add_diff(buffer_diff.clone(), cx); + multibuffer + }); + + Self::Pending(PendingDiff { + multibuffer, + base_text: Arc::new(base_text), + _subscription: cx.observe(&buffer, |this, _, cx| { + if let Diff::Pending(diff) = this { + diff.update(cx); + } + }), + buffer, + diff: buffer_diff, + revealed_ranges: Vec::new(), + update_diff: Task::ready(Ok(())), + }) + } + + pub fn reveal_range(&mut self, range: Range, cx: &mut Context) { + if let Self::Pending(diff) = self { + diff.reveal_range(range, cx); + } + } + + pub fn finalize(&mut self, cx: &mut Context) { + if let Self::Pending(diff) = self { + *self = Self::Finalized(diff.finalize(cx)); + } + } + + pub fn multibuffer(&self) -> &Entity { + match self { + Self::Pending(PendingDiff { multibuffer, .. }) => multibuffer, + Self::Finalized(FinalizedDiff { multibuffer, .. }) => multibuffer, + } + } + + pub fn to_markdown(&self, cx: &App) -> String { + let buffer_text = self + .multibuffer() + .read(cx) + .all_buffers() + .iter() + .map(|buffer| buffer.read(cx).text()) + .join("\n"); + let path = match self { + Diff::Pending(PendingDiff { buffer, .. }) => { + buffer.read(cx).file().map(|file| file.path().as_ref()) + } + Diff::Finalized(FinalizedDiff { path, .. 
}) => Some(path.as_path()), + }; + format!( + "Diff: {}\n```\n{}\n```\n", + path.unwrap_or(Path::new("untitled")).display(), + buffer_text + ) + } + + pub fn has_revealed_range(&self, cx: &App) -> bool { + self.multibuffer().read(cx).excerpt_paths().next().is_some() + } +} + +pub struct PendingDiff { + multibuffer: Entity, + base_text: Arc, + buffer: Entity, + diff: Entity, + revealed_ranges: Vec>, + _subscription: Subscription, + update_diff: Task>, +} + +impl PendingDiff { + pub fn update(&mut self, cx: &mut Context) { + let buffer = self.buffer.clone(); + let buffer_diff = self.diff.clone(); + let base_text = self.base_text.clone(); + self.update_diff = cx.spawn(async move |diff, cx| { + let text_snapshot = buffer.read_with(cx, |buffer, _| buffer.text_snapshot())?; + let diff_snapshot = BufferDiff::update_diff( + buffer_diff.clone(), + text_snapshot.clone(), + Some(base_text), + false, + false, + None, + None, + cx, + ) + .await?; + buffer_diff.update(cx, |diff, cx| { + diff.set_snapshot(diff_snapshot.clone(), &text_snapshot, cx); + diff.secondary_diff().unwrap().update(cx, |diff, cx| { + diff.set_snapshot(diff_snapshot.clone(), &text_snapshot, cx); + }); + })?; + diff.update(cx, |diff, cx| { + if let Diff::Pending(diff) = diff { + diff.update_visible_ranges(cx); + } + }) + }); + } + + pub fn reveal_range(&mut self, range: Range, cx: &mut Context) { + self.revealed_ranges.push(range); + self.update_visible_ranges(cx); + } + + fn finalize(&self, cx: &mut Context) -> FinalizedDiff { + let ranges = self.excerpt_ranges(cx); + let base_text = self.base_text.clone(); + let language_registry = self.buffer.read(cx).language_registry(); + + let path = self + .buffer + .read(cx) + .file() + .map(|file| file.path().as_ref()) + .unwrap_or(Path::new("untitled")) + .into(); + + // Replace the buffer in the multibuffer with the snapshot + let buffer = cx.new(|cx| { + let language = self.buffer.read(cx).language().cloned(); + let buffer = TextBuffer::new_normalized( + 0, + cx.entity_id().as_non_zero_u64().into(), + self.buffer.read(cx).line_ending(), + self.buffer.read(cx).as_rope().clone(), + ); + let mut buffer = Buffer::build(buffer, None, Capability::ReadWrite); + buffer.set_language(language, cx); + buffer + }); + + let buffer_diff = cx.spawn({ + let buffer = buffer.clone(); + async move |_this, cx| { + build_buffer_diff(base_text, &buffer, language_registry, cx).await + } + }); + + let update_diff = cx.spawn(async move |this, cx| { + let buffer_diff = buffer_diff.await?; + this.update(cx, |this, cx| { + this.multibuffer().update(cx, |multibuffer, cx| { + let path_key = PathKey::for_buffer(&buffer, cx); + multibuffer.clear(cx); + multibuffer.set_excerpts_for_path( + path_key, + buffer, + ranges, + editor::DEFAULT_MULTIBUFFER_CONTEXT, + cx, + ); + multibuffer.add_diff(buffer_diff.clone(), cx); + }); + + cx.notify(); + }) + }); + + FinalizedDiff { + path, + multibuffer: self.multibuffer.clone(), + _update_diff: update_diff, + } + } + + fn update_visible_ranges(&mut self, cx: &mut Context) { + let ranges = self.excerpt_ranges(cx); + self.multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + PathKey::for_buffer(&self.buffer, cx), + self.buffer.clone(), + ranges, + editor::DEFAULT_MULTIBUFFER_CONTEXT, + cx, + ); + let end = multibuffer.len(cx); + Some(multibuffer.snapshot(cx).offset_to_point(end).row + 1) + }); + cx.notify(); + } + + fn excerpt_ranges(&self, cx: &App) -> Vec> { + let buffer = self.buffer.read(cx); + let diff = self.diff.read(cx); + let mut ranges = diff + 
.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer, cx) + .map(|diff_hunk| diff_hunk.buffer_range.to_point(buffer)) + .collect::>(); + ranges.extend( + self.revealed_ranges + .iter() + .map(|range| range.to_point(buffer)), + ); + ranges.sort_unstable_by_key(|range| (range.start, Reverse(range.end))); + + // Merge adjacent ranges + let mut ranges = ranges.into_iter().peekable(); + let mut merged_ranges = Vec::new(); + while let Some(mut range) = ranges.next() { + while let Some(next_range) = ranges.peek() { + if range.end >= next_range.start { + range.end = range.end.max(next_range.end); + ranges.next(); + } else { + break; + } + } + + merged_ranges.push(range); + } + merged_ranges + } +} + +pub struct FinalizedDiff { + path: PathBuf, + multibuffer: Entity, + _update_diff: Task>, +} + +async fn build_buffer_diff( + old_text: Arc, + buffer: &Entity, + language_registry: Option>, + cx: &mut AsyncApp, +) -> Result> { + let buffer = cx.update(|cx| buffer.read(cx).snapshot())?; + + let old_text_rope = cx + .background_spawn({ + let old_text = old_text.clone(); + async move { Rope::from(old_text.as_str()) } + }) + .await; + let base_buffer = cx + .update(|cx| { + Buffer::build_snapshot( + old_text_rope, + buffer.language().cloned(), + language_registry, + cx, + ) + })? + .await; + + let diff_snapshot = cx + .update(|cx| { + BufferDiffSnapshot::new_with_base_buffer( + buffer.text.clone(), + Some(old_text), + base_buffer, + cx, + ) + })? + .await; + + let secondary_diff = cx.new(|cx| { + let mut diff = BufferDiff::new(&buffer, cx); + diff.set_snapshot(diff_snapshot.clone(), &buffer, cx); + diff + })?; + + cx.new(|cx| { + let mut diff = BufferDiff::new(&buffer.text, cx); + diff.set_snapshot(diff_snapshot, &buffer, cx); + diff.set_secondary_diff(secondary_diff); + diff + }) +} diff --git a/crates/acp_thread/src/mention.rs b/crates/acp_thread/src/mention.rs new file mode 100644 index 0000000000000000000000000000000000000000..a1e713cffa051a0eef58a45fceafc8876cab3311 --- /dev/null +++ b/crates/acp_thread/src/mention.rs @@ -0,0 +1,438 @@ +use agent_client_protocol as acp; +use anyhow::{Context as _, Result, bail}; +use file_icons::FileIcons; +use prompt_store::{PromptId, UserPromptId}; +use serde::{Deserialize, Serialize}; +use std::{ + fmt, + ops::Range, + path::{Path, PathBuf}, + str::FromStr, +}; +use ui::{App, IconName, SharedString}; +use url::Url; + +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)] +pub enum MentionUri { + File { + abs_path: PathBuf, + }, + Directory { + abs_path: PathBuf, + }, + Symbol { + path: PathBuf, + name: String, + line_range: Range, + }, + Thread { + id: acp::SessionId, + name: String, + }, + TextThread { + path: PathBuf, + name: String, + }, + Rule { + id: PromptId, + name: String, + }, + Selection { + path: PathBuf, + line_range: Range, + }, + Fetch { + url: Url, + }, +} + +impl MentionUri { + pub fn parse(input: &str) -> Result { + let url = url::Url::parse(input)?; + let path = url.path(); + match url.scheme() { + "file" => { + let path = url.to_file_path().ok().context("Extracting file path")?; + if let Some(fragment) = url.fragment() { + let range = fragment + .strip_prefix("L") + .context("Line range must start with \"L\"")?; + let (start, end) = range + .split_once(":") + .context("Line range must use colon as separator")?; + let line_range = start + .parse::() + .context("Parsing line range start")? + .checked_sub(1) + .context("Line numbers should be 1-based")? + ..end + .parse::() + .context("Parsing line range end")? 
+ .checked_sub(1) + .context("Line numbers should be 1-based")?; + if let Some(name) = single_query_param(&url, "symbol")? { + Ok(Self::Symbol { + name, + path, + line_range, + }) + } else { + Ok(Self::Selection { path, line_range }) + } + } else if input.ends_with("/") { + Ok(Self::Directory { abs_path: path }) + } else { + Ok(Self::File { abs_path: path }) + } + } + "zed" => { + if let Some(thread_id) = path.strip_prefix("/agent/thread/") { + let name = single_query_param(&url, "name")?.context("Missing thread name")?; + Ok(Self::Thread { + id: acp::SessionId(thread_id.into()), + name, + }) + } else if let Some(path) = path.strip_prefix("/agent/text-thread/") { + let name = single_query_param(&url, "name")?.context("Missing thread name")?; + Ok(Self::TextThread { + path: path.into(), + name, + }) + } else if let Some(rule_id) = path.strip_prefix("/agent/rule/") { + let name = single_query_param(&url, "name")?.context("Missing rule name")?; + let rule_id = UserPromptId(rule_id.parse()?); + Ok(Self::Rule { + id: rule_id.into(), + name, + }) + } else { + bail!("invalid zed url: {:?}", input); + } + } + "http" | "https" => Ok(MentionUri::Fetch { url }), + other => bail!("unrecognized scheme {:?}", other), + } + } + + pub fn name(&self) -> String { + match self { + MentionUri::File { abs_path, .. } | MentionUri::Directory { abs_path, .. } => abs_path + .file_name() + .unwrap_or_default() + .to_string_lossy() + .into_owned(), + MentionUri::Symbol { name, .. } => name.clone(), + MentionUri::Thread { name, .. } => name.clone(), + MentionUri::TextThread { name, .. } => name.clone(), + MentionUri::Rule { name, .. } => name.clone(), + MentionUri::Selection { + path, line_range, .. + } => selection_name(path, line_range), + MentionUri::Fetch { url } => url.to_string(), + } + } + + pub fn icon_path(&self, cx: &mut App) -> SharedString { + match self { + MentionUri::File { abs_path } => { + FileIcons::get_icon(abs_path, cx).unwrap_or_else(|| IconName::File.path().into()) + } + MentionUri::Directory { .. } => FileIcons::get_folder_icon(false, cx) + .unwrap_or_else(|| IconName::Folder.path().into()), + MentionUri::Symbol { .. } => IconName::Code.path().into(), + MentionUri::Thread { .. } => IconName::Thread.path().into(), + MentionUri::TextThread { .. } => IconName::Thread.path().into(), + MentionUri::Rule { .. } => IconName::Reader.path().into(), + MentionUri::Selection { .. } => IconName::Reader.path().into(), + MentionUri::Fetch { .. 
} => IconName::ToolWeb.path().into(), + } + } + + pub fn as_link<'a>(&'a self) -> MentionLink<'a> { + MentionLink(self) + } + + pub fn to_uri(&self) -> Url { + match self { + MentionUri::File { abs_path } => { + Url::from_file_path(abs_path).expect("mention path should be absolute") + } + MentionUri::Directory { abs_path } => { + Url::from_directory_path(abs_path).expect("mention path should be absolute") + } + MentionUri::Symbol { + path, + name, + line_range, + } => { + let mut url = Url::from_file_path(path).expect("mention path should be absolute"); + url.query_pairs_mut().append_pair("symbol", name); + url.set_fragment(Some(&format!( + "L{}:{}", + line_range.start + 1, + line_range.end + 1 + ))); + url + } + MentionUri::Selection { path, line_range } => { + let mut url = Url::from_file_path(path).expect("mention path should be absolute"); + url.set_fragment(Some(&format!( + "L{}:{}", + line_range.start + 1, + line_range.end + 1 + ))); + url + } + MentionUri::Thread { name, id } => { + let mut url = Url::parse("zed:///").unwrap(); + url.set_path(&format!("/agent/thread/{id}")); + url.query_pairs_mut().append_pair("name", name); + url + } + MentionUri::TextThread { path, name } => { + let mut url = Url::parse("zed:///").unwrap(); + url.set_path(&format!("/agent/text-thread/{}", path.to_string_lossy())); + url.query_pairs_mut().append_pair("name", name); + url + } + MentionUri::Rule { name, id } => { + let mut url = Url::parse("zed:///").unwrap(); + url.set_path(&format!("/agent/rule/{id}")); + url.query_pairs_mut().append_pair("name", name); + url + } + MentionUri::Fetch { url } => url.clone(), + } + } +} + +impl FromStr for MentionUri { + type Err = anyhow::Error; + + fn from_str(s: &str) -> anyhow::Result { + Self::parse(s) + } +} + +pub struct MentionLink<'a>(&'a MentionUri); + +impl fmt::Display for MentionLink<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "[@{}]({})", self.0.name(), self.0.to_uri()) + } +} + +fn single_query_param(url: &Url, name: &'static str) -> Result> { + let pairs = url.query_pairs().collect::>(); + match pairs.as_slice() { + [] => Ok(None), + [(k, v)] => { + if k != name { + bail!("invalid query parameter") + } + + Ok(Some(v.to_string())) + } + _ => bail!("too many query pairs"), + } +} + +pub fn selection_name(path: &Path, line_range: &Range) -> String { + format!( + "{} ({}:{})", + path.file_name().unwrap_or_default().display(), + line_range.start + 1, + line_range.end + 1 + ) +} + +#[cfg(test)] +mod tests { + use util::{path, uri}; + + use super::*; + + #[test] + fn test_parse_file_uri() { + let file_uri = uri!("file:///path/to/file.rs"); + let parsed = MentionUri::parse(file_uri).unwrap(); + match &parsed { + MentionUri::File { abs_path } => { + assert_eq!(abs_path.to_str().unwrap(), path!("/path/to/file.rs")); + } + _ => panic!("Expected File variant"), + } + assert_eq!(parsed.to_uri().to_string(), file_uri); + } + + #[test] + fn test_parse_directory_uri() { + let file_uri = uri!("file:///path/to/dir/"); + let parsed = MentionUri::parse(file_uri).unwrap(); + match &parsed { + MentionUri::Directory { abs_path } => { + assert_eq!(abs_path.to_str().unwrap(), path!("/path/to/dir/")); + } + _ => panic!("Expected Directory variant"), + } + assert_eq!(parsed.to_uri().to_string(), file_uri); + } + + #[test] + fn test_to_directory_uri_with_slash() { + let uri = MentionUri::Directory { + abs_path: PathBuf::from(path!("/path/to/dir/")), + }; + let expected = uri!("file:///path/to/dir/"); + assert_eq!(uri.to_uri().to_string(), 
expected); + } + + #[test] + fn test_to_directory_uri_without_slash() { + let uri = MentionUri::Directory { + abs_path: PathBuf::from(path!("/path/to/dir")), + }; + let expected = uri!("file:///path/to/dir/"); + assert_eq!(uri.to_uri().to_string(), expected); + } + + #[test] + fn test_parse_symbol_uri() { + let symbol_uri = uri!("file:///path/to/file.rs?symbol=MySymbol#L10:20"); + let parsed = MentionUri::parse(symbol_uri).unwrap(); + match &parsed { + MentionUri::Symbol { + path, + name, + line_range, + } => { + assert_eq!(path.to_str().unwrap(), path!("/path/to/file.rs")); + assert_eq!(name, "MySymbol"); + assert_eq!(line_range.start, 9); + assert_eq!(line_range.end, 19); + } + _ => panic!("Expected Symbol variant"), + } + assert_eq!(parsed.to_uri().to_string(), symbol_uri); + } + + #[test] + fn test_parse_selection_uri() { + let selection_uri = uri!("file:///path/to/file.rs#L5:15"); + let parsed = MentionUri::parse(selection_uri).unwrap(); + match &parsed { + MentionUri::Selection { path, line_range } => { + assert_eq!(path.to_str().unwrap(), path!("/path/to/file.rs")); + assert_eq!(line_range.start, 4); + assert_eq!(line_range.end, 14); + } + _ => panic!("Expected Selection variant"), + } + assert_eq!(parsed.to_uri().to_string(), selection_uri); + } + + #[test] + fn test_parse_thread_uri() { + let thread_uri = "zed:///agent/thread/session123?name=Thread+name"; + let parsed = MentionUri::parse(thread_uri).unwrap(); + match &parsed { + MentionUri::Thread { + id: thread_id, + name, + } => { + assert_eq!(thread_id.to_string(), "session123"); + assert_eq!(name, "Thread name"); + } + _ => panic!("Expected Thread variant"), + } + assert_eq!(parsed.to_uri().to_string(), thread_uri); + } + + #[test] + fn test_parse_rule_uri() { + let rule_uri = "zed:///agent/rule/d8694ff2-90d5-4b6f-be33-33c1763acd52?name=Some+rule"; + let parsed = MentionUri::parse(rule_uri).unwrap(); + match &parsed { + MentionUri::Rule { id, name } => { + assert_eq!(id.to_string(), "d8694ff2-90d5-4b6f-be33-33c1763acd52"); + assert_eq!(name, "Some rule"); + } + _ => panic!("Expected Rule variant"), + } + assert_eq!(parsed.to_uri().to_string(), rule_uri); + } + + #[test] + fn test_parse_fetch_http_uri() { + let http_uri = "http://example.com/path?query=value#fragment"; + let parsed = MentionUri::parse(http_uri).unwrap(); + match &parsed { + MentionUri::Fetch { url } => { + assert_eq!(url.to_string(), http_uri); + } + _ => panic!("Expected Fetch variant"), + } + assert_eq!(parsed.to_uri().to_string(), http_uri); + } + + #[test] + fn test_parse_fetch_https_uri() { + let https_uri = "https://example.com/api/endpoint"; + let parsed = MentionUri::parse(https_uri).unwrap(); + match &parsed { + MentionUri::Fetch { url } => { + assert_eq!(url.to_string(), https_uri); + } + _ => panic!("Expected Fetch variant"), + } + assert_eq!(parsed.to_uri().to_string(), https_uri); + } + + #[test] + fn test_invalid_scheme() { + assert!(MentionUri::parse("ftp://example.com").is_err()); + assert!(MentionUri::parse("ssh://example.com").is_err()); + assert!(MentionUri::parse("unknown://example.com").is_err()); + } + + #[test] + fn test_invalid_zed_path() { + assert!(MentionUri::parse("zed:///invalid/path").is_err()); + assert!(MentionUri::parse("zed:///agent/unknown/test").is_err()); + } + + #[test] + fn test_invalid_line_range_format() { + // Missing L prefix + assert!(MentionUri::parse(uri!("file:///path/to/file.rs#10:20")).is_err()); + + // Missing colon separator + assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L1020")).is_err()); + + // 
Invalid numbers + assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L10:abc")).is_err()); + assert!(MentionUri::parse(uri!("file:///path/to/file.rs#Labc:20")).is_err()); + } + + #[test] + fn test_invalid_query_parameters() { + // Invalid query parameter name + assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L10:20?invalid=test")).is_err()); + + // Too many query parameters + assert!( + MentionUri::parse(uri!( + "file:///path/to/file.rs#L10:20?symbol=test&another=param" + )) + .is_err() + ); + } + + #[test] + fn test_zero_based_line_numbers() { + // Test that 0-based line numbers are rejected (should be 1-based) + assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L0:10")).is_err()); + assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L1:0")).is_err()); + assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L0:0")).is_err()); + } +} diff --git a/crates/acp_thread/src/terminal.rs b/crates/acp_thread/src/terminal.rs new file mode 100644 index 0000000000000000000000000000000000000000..41d7fb89bb2eb59207bf0a6557129a088b435f3a --- /dev/null +++ b/crates/acp_thread/src/terminal.rs @@ -0,0 +1,93 @@ +use gpui::{App, AppContext, Context, Entity}; +use language::LanguageRegistry; +use markdown::Markdown; +use std::{path::PathBuf, process::ExitStatus, sync::Arc, time::Instant}; + +pub struct Terminal { + command: Entity, + working_dir: Option, + terminal: Entity, + started_at: Instant, + output: Option, +} + +pub struct TerminalOutput { + pub ended_at: Instant, + pub exit_status: Option, + pub was_content_truncated: bool, + pub original_content_len: usize, + pub content_line_count: usize, + pub finished_with_empty_output: bool, +} + +impl Terminal { + pub fn new( + command: String, + working_dir: Option, + terminal: Entity, + language_registry: Arc, + cx: &mut Context, + ) -> Self { + Self { + command: cx.new(|cx| { + Markdown::new( + format!("```\n{}\n```", command).into(), + Some(language_registry.clone()), + None, + cx, + ) + }), + working_dir, + terminal, + started_at: Instant::now(), + output: None, + } + } + + pub fn finish( + &mut self, + exit_status: Option, + original_content_len: usize, + truncated_content_len: usize, + content_line_count: usize, + finished_with_empty_output: bool, + cx: &mut Context, + ) { + self.output = Some(TerminalOutput { + ended_at: Instant::now(), + exit_status, + was_content_truncated: truncated_content_len < original_content_len, + original_content_len, + content_line_count, + finished_with_empty_output, + }); + cx.notify(); + } + + pub fn command(&self) -> &Entity { + &self.command + } + + pub fn working_dir(&self) -> &Option { + &self.working_dir + } + + pub fn started_at(&self) -> Instant { + self.started_at + } + + pub fn output(&self) -> Option<&TerminalOutput> { + self.output.as_ref() + } + + pub fn inner(&self) -> &Entity { + &self.terminal + } + + pub fn to_markdown(&self, cx: &App) -> String { + format!( + "Terminal:\n```\n{}\n```\n", + self.terminal.read(cx).get_content() + ) + } +} diff --git a/crates/action_log/Cargo.toml b/crates/action_log/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..1a389e8859b24a320720ecfc3fa6cf2a13f274ad --- /dev/null +++ b/crates/action_log/Cargo.toml @@ -0,0 +1,45 @@ +[package] +name = "action_log" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lib] +path = "src/action_log.rs" + +[lints] +workspace = true + +[dependencies] +anyhow.workspace = true +buffer_diff.workspace = true +clock.workspace = true 
+collections.workspace = true +futures.workspace = true +gpui.workspace = true +language.workspace = true +project.workspace = true +text.workspace = true +util.workspace = true +watch.workspace = true +workspace-hack.workspace = true + + +[dev-dependencies] +buffer_diff = { workspace = true, features = ["test-support"] } +collections = { workspace = true, features = ["test-support"] } +clock = { workspace = true, features = ["test-support"] } +ctor.workspace = true +gpui = { workspace = true, features = ["test-support"] } +indoc.workspace = true +language = { workspace = true, features = ["test-support"] } +log.workspace = true +pretty_assertions.workspace = true +project = { workspace = true, features = ["test-support"] } +rand.workspace = true +serde_json.workspace = true +settings = { workspace = true, features = ["test-support"] } +text = { workspace = true, features = ["test-support"] } +util = { workspace = true, features = ["test-support"] } +zlog.workspace = true diff --git a/crates/indexed_docs/LICENSE-GPL b/crates/action_log/LICENSE-GPL similarity index 100% rename from crates/indexed_docs/LICENSE-GPL rename to crates/action_log/LICENSE-GPL diff --git a/crates/assistant_tool/src/action_log.rs b/crates/action_log/src/action_log.rs similarity index 97% rename from crates/assistant_tool/src/action_log.rs rename to crates/action_log/src/action_log.rs index 025aba060d9380390b06478d9ddc0ad9c4f52e5a..9ec10f4dbb0e670bf20d9c033db9cec02e5fda67 100644 --- a/crates/assistant_tool/src/action_log.rs +++ b/crates/action_log/src/action_log.rs @@ -17,8 +17,6 @@ use util::{ pub struct ActionLog { /// Buffers that we want to notify the model about when they change. tracked_buffers: BTreeMap, TrackedBuffer>, - /// Has the model edited a file since it last checked diagnostics? - edited_since_project_diagnostics_check: bool, /// The project this action log is associated with project: Entity, } @@ -28,7 +26,6 @@ impl ActionLog { pub fn new(project: Entity) -> Self { Self { tracked_buffers: BTreeMap::default(), - edited_since_project_diagnostics_check: false, project, } } @@ -37,16 +34,6 @@ impl ActionLog { &self.project } - /// Notifies a diagnostics check - pub fn checked_project_diagnostics(&mut self) { - self.edited_since_project_diagnostics_check = false; - } - - /// Returns true if any files have been edited since the last project diagnostics check - pub fn has_edited_files_since_project_diagnostics_check(&self) -> bool { - self.edited_since_project_diagnostics_check - } - pub fn latest_snapshot(&self, buffer: &Entity) -> Option { Some(self.tracked_buffers.get(buffer)?.snapshot.clone()) } @@ -129,7 +116,7 @@ impl ActionLog { } else if buffer .read(cx) .file() - .map_or(false, |file| file.disk_state().exists()) + .is_some_and(|file| file.disk_state().exists()) { TrackedBufferStatus::Created { existing_file_content: Some(buffer.read(cx).as_rope().clone()), @@ -174,7 +161,7 @@ impl ActionLog { diff_base, last_seen_base, unreviewed_edits, - snapshot: text_snapshot.clone(), + snapshot: text_snapshot, status, version: buffer.read(cx).version(), diff, @@ -203,7 +190,7 @@ impl ActionLog { cx: &mut Context, ) { match event { - BufferEvent::Edited { .. 
} => self.handle_buffer_edited(buffer, cx), + BufferEvent::Edited => self.handle_buffer_edited(buffer, cx), BufferEvent::FileHandleChanged => { self.handle_buffer_file_changed(buffer, cx); } @@ -228,7 +215,7 @@ impl ActionLog { if buffer .read(cx) .file() - .map_or(false, |file| file.disk_state() == DiskState::Deleted) + .is_some_and(|file| file.disk_state() == DiskState::Deleted) { // If the buffer had been edited by a tool, but it got // deleted externally, we want to stop tracking it. @@ -240,7 +227,7 @@ impl ActionLog { if buffer .read(cx) .file() - .map_or(false, |file| file.disk_state() != DiskState::Deleted) + .is_some_and(|file| file.disk_state() != DiskState::Deleted) { // If the buffer had been deleted by a tool, but it got // resurrected externally, we want to clear the edits we @@ -277,15 +264,14 @@ impl ActionLog { if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) { cx.update(|cx| { let mut old_head = buffer_repo.read(cx).head_commit.clone(); - Some(cx.subscribe(git_diff, move |_, event, cx| match event { - buffer_diff::BufferDiffEvent::DiffChanged { .. } => { + Some(cx.subscribe(git_diff, move |_, event, cx| { + if let buffer_diff::BufferDiffEvent::DiffChanged { .. } = event { let new_head = buffer_repo.read(cx).head_commit.clone(); if new_head != old_head { old_head = new_head; git_diff_updates_tx.send(()).ok(); } } - _ => {} })) })? } else { @@ -303,7 +289,7 @@ impl ActionLog { } _ = git_diff_updates_rx.changed().fuse() => { if let Some(git_diff) = git_diff.as_ref() { - Self::keep_committed_edits(&this, &buffer, &git_diff, cx).await?; + Self::keep_committed_edits(&this, &buffer, git_diff, cx).await?; } } } @@ -475,7 +461,7 @@ impl ActionLog { anyhow::Ok(( tracked_buffer.diff.clone(), buffer.read(cx).language().cloned(), - buffer.read(cx).language_registry().clone(), + buffer.read(cx).language_registry(), )) })??; let diff_snapshot = BufferDiff::update_diff( @@ -511,7 +497,7 @@ impl ActionLog { new: new_range, }, &new_diff_base, - &buffer_snapshot.as_rope(), + buffer_snapshot.as_rope(), )); } unreviewed_edits @@ -543,15 +529,12 @@ impl ActionLog { /// Mark a buffer as created by agent, so we can refresh it in the context pub fn buffer_created(&mut self, buffer: Entity, cx: &mut Context) { - self.edited_since_project_diagnostics_check = true; - self.track_buffer_internal(buffer.clone(), true, cx); + self.track_buffer_internal(buffer, true, cx); } /// Mark a buffer as edited by agent, so we can refresh it in the context pub fn buffer_edited(&mut self, buffer: Entity, cx: &mut Context) { - self.edited_since_project_diagnostics_check = true; - - let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx); + let tracked_buffer = self.track_buffer_internal(buffer, false, cx); if let TrackedBufferStatus::Deleted = tracked_buffer.status { tracked_buffer.status = TrackedBufferStatus::Modified; } @@ -630,10 +613,10 @@ impl ActionLog { false } }); - if tracked_buffer.unreviewed_edits.is_empty() { - if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status { - tracked_buffer.status = TrackedBufferStatus::Modified; - } + if tracked_buffer.unreviewed_edits.is_empty() + && let TrackedBufferStatus::Created { .. 
} = &mut tracked_buffer.status + { + tracked_buffer.status = TrackedBufferStatus::Modified; } tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx); } @@ -827,7 +810,7 @@ impl ActionLog { tracked.version != buffer.version && buffer .file() - .map_or(false, |file| file.disk_state() != DiskState::Deleted) + .is_some_and(|file| file.disk_state() != DiskState::Deleted) }) .map(|(buffer, _)| buffer) } @@ -863,7 +846,7 @@ fn apply_non_conflicting_edits( conflict = true; if new_edits .peek() - .map_or(false, |next_edit| next_edit.old.overlaps(&old_edit.new)) + .is_some_and(|next_edit| next_edit.old.overlaps(&old_edit.new)) { new_edit = new_edits.next().unwrap(); } else { @@ -980,7 +963,7 @@ impl TrackedBuffer { fn has_edits(&self, cx: &App) -> bool { self.diff .read(cx) - .hunks(&self.buffer.read(cx), cx) + .hunks(self.buffer.read(cx), cx) .next() .is_some() } @@ -2284,7 +2267,7 @@ mod tests { log::info!("quiescing..."); cx.run_until_parked(); action_log.update(cx, |log, cx| { - let tracked_buffer = log.tracked_buffers.get(&buffer).unwrap(); + let tracked_buffer = log.tracked_buffers.get(buffer).unwrap(); let mut old_text = tracked_buffer.diff_base.clone(); let new_text = buffer.read(cx).as_rope(); for edit in tracked_buffer.unreviewed_edits.edits() { @@ -2442,7 +2425,7 @@ mod tests { assert_eq!( unreviewed_hunks(&action_log, cx), vec![( - buffer.clone(), + buffer, vec![ HunkStatus { range: Point::new(6, 0)..Point::new(7, 0), diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index f8ea7173d8afecb14a8e8ed9e1de6a87660ddc1a..6641db0805fed2fbade1e66cde143f58123dd3d4 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -103,26 +103,21 @@ impl ActivityIndicator { cx.subscribe_in( &workspace_handle, window, - |activity_indicator, _, event, window, cx| match event { - workspace::Event::ClearActivityIndicator { .. } => { - if activity_indicator.statuses.pop().is_some() { - activity_indicator.dismiss_error_message( - &DismissErrorMessage, - window, - cx, - ); - cx.notify(); - } + |activity_indicator, _, event, window, cx| { + if let workspace::Event::ClearActivityIndicator = event + && activity_indicator.statuses.pop().is_some() + { + activity_indicator.dismiss_error_message(&DismissErrorMessage, window, cx); + cx.notify(); } - _ => {} }, ) .detach(); cx.subscribe( &project.read(cx).lsp_store(), - |activity_indicator, _, event, cx| match event { - LspStoreEvent::LanguageServerUpdate { name, message, .. } => { + |activity_indicator, _, event, cx| { + if let LspStoreEvent::LanguageServerUpdate { name, message, .. 
} = event { if let proto::update_language_server::Variant::StatusUpdate(status_update) = message { @@ -191,7 +186,6 @@ impl ActivityIndicator { } cx.notify() } - _ => {} }, ) .detach(); @@ -206,9 +200,10 @@ impl ActivityIndicator { cx.subscribe( &project.read(cx).git_store().clone(), - |_, _, event: &GitStoreEvent, cx| match event { - project::git_store::GitStoreEvent::JobsUpdated => cx.notify(), - _ => {} + |_, _, event: &GitStoreEvent, cx| { + if let project::git_store::GitStoreEvent::JobsUpdated = event { + cx.notify() + } }, ) .detach(); @@ -458,26 +453,24 @@ impl ActivityIndicator { .map(|r| r.read(cx)) .and_then(Repository::current_job); // Show any long-running git command - if let Some(job_info) = current_job { - if Instant::now() - job_info.start >= GIT_OPERATION_DELAY { - return Some(Content { - icon: Some( - Icon::new(IconName::ArrowCircle) - .size(IconSize::Small) - .with_animation( - "arrow-circle", - Animation::new(Duration::from_secs(2)).repeat(), - |icon, delta| { - icon.transform(Transformation::rotate(percentage(delta))) - }, - ) - .into_any_element(), - ), - message: job_info.message.into(), - on_click: None, - tooltip_message: None, - }); - } + if let Some(job_info) = current_job + && Instant::now() - job_info.start >= GIT_OPERATION_DELAY + { + return Some(Content { + icon: Some( + Icon::new(IconName::ArrowCircle) + .size(IconSize::Small) + .with_animation( + "arrow-circle", + Animation::new(Duration::from_secs(2)).repeat(), + |icon, delta| icon.transform(Transformation::rotate(percentage(delta))), + ) + .into_any_element(), + ), + message: job_info.message.into(), + on_click: None, + tooltip_message: None, + }); } // Show any language server installation info. @@ -702,7 +695,7 @@ impl ActivityIndicator { on_click: Some(Arc::new(|this, window, cx| { this.dismiss_error_message(&DismissErrorMessage, window, cx) })), - tooltip_message: Some(Self::version_tooltip_message(&version)), + tooltip_message: Some(Self::version_tooltip_message(version)), }), AutoUpdateStatus::Installing { version } => Some(Content { icon: Some( @@ -714,21 +707,13 @@ impl ActivityIndicator { on_click: Some(Arc::new(|this, window, cx| { this.dismiss_error_message(&DismissErrorMessage, window, cx) })), - tooltip_message: Some(Self::version_tooltip_message(&version)), + tooltip_message: Some(Self::version_tooltip_message(version)), }), - AutoUpdateStatus::Updated { - binary_path, - version, - } => Some(Content { + AutoUpdateStatus::Updated { version } => Some(Content { icon: None, message: "Click to restart and update Zed".to_string(), - on_click: Some(Arc::new({ - let reload = workspace::Reload { - binary_path: Some(binary_path.clone()), - }; - move |_, _, cx| workspace::reload(&reload, cx) - })), - tooltip_message: Some(Self::version_tooltip_message(&version)), + on_click: Some(Arc::new(move |_, _, cx| workspace::reload(cx))), + tooltip_message: Some(Self::version_tooltip_message(version)), }), AutoUpdateStatus::Errored => Some(Content { icon: Some( @@ -748,21 +733,20 @@ impl ActivityIndicator { if let Some(extension_store) = ExtensionStore::try_global(cx).map(|extension_store| extension_store.read(cx)) + && let Some(extension_id) = extension_store.outstanding_operations().keys().next() { - if let Some(extension_id) = extension_store.outstanding_operations().keys().next() { - return Some(Content { - icon: Some( - Icon::new(IconName::Download) - .size(IconSize::Small) - .into_any_element(), - ), - message: format!("Updating {extension_id} extension…"), - on_click: Some(Arc::new(|this, window, cx| { - 
this.dismiss_error_message(&DismissErrorMessage, window, cx) - })), - tooltip_message: None, - }); - } + return Some(Content { + icon: Some( + Icon::new(IconName::Download) + .size(IconSize::Small) + .into_any_element(), + ), + message: format!("Updating {extension_id} extension…"), + on_click: Some(Arc::new(|this, window, cx| { + this.dismiss_error_message(&DismissErrorMessage, window, cx) + })), + tooltip_message: None, + }); } None diff --git a/crates/agent/Cargo.toml b/crates/agent/Cargo.toml index 7bc0e82cadd12a4a5e0926cfc869af194431955d..391abb38fe826923b06646511c0dd6c5ce5c6ca4 100644 --- a/crates/agent/Cargo.toml +++ b/crates/agent/Cargo.toml @@ -19,6 +19,7 @@ test-support = [ ] [dependencies] +action_log.workspace = true agent_settings.workspace = true anyhow.workspace = true assistant_context.workspace = true @@ -30,7 +31,6 @@ collections.workspace = true component.workspace = true context_server.workspace = true convert_case.workspace = true -feature_flags.workspace = true fs.workspace = true futures.workspace = true git.workspace = true diff --git a/crates/agent/src/agent_profile.rs b/crates/agent/src/agent_profile.rs index 34ea1c8df7c4e2bbc58ac8a57b11655917c7aac2..c9e73372f60686cf330531926f4129e9c9b25db8 100644 --- a/crates/agent/src/agent_profile.rs +++ b/crates/agent/src/agent_profile.rs @@ -90,7 +90,7 @@ impl AgentProfile { return false; }; - return Self::is_enabled(settings, source, tool_name); + Self::is_enabled(settings, source, tool_name) } fn is_enabled(settings: &AgentProfileSettings, source: ToolSource, name: String) -> bool { @@ -132,7 +132,7 @@ mod tests { }); let tool_set = default_tool_set(cx); - let profile = AgentProfile::new(id.clone(), tool_set); + let profile = AgentProfile::new(id, tool_set); let mut enabled_tools = cx .read(|cx| profile.enabled_tools(cx)) @@ -169,7 +169,7 @@ mod tests { }); let tool_set = default_tool_set(cx); - let profile = AgentProfile::new(id.clone(), tool_set); + let profile = AgentProfile::new(id, tool_set); let mut enabled_tools = cx .read(|cx| profile.enabled_tools(cx)) @@ -202,7 +202,7 @@ mod tests { }); let tool_set = default_tool_set(cx); - let profile = AgentProfile::new(id.clone(), tool_set); + let profile = AgentProfile::new(id, tool_set); let mut enabled_tools = cx .read(|cx| profile.enabled_tools(cx)) @@ -326,7 +326,7 @@ mod tests { _input: serde_json::Value, _request: Arc, _project: Entity, - _action_log: Entity, + _action_log: Entity, _model: Arc, _window: Option, _cx: &mut App, diff --git a/crates/agent/src/context.rs b/crates/agent/src/context.rs index cd366b8308abf8c9900d24633eea95d7ff92df6d..a94a933d864d204037b3d575cbdc4d85870aeddb 100644 --- a/crates/agent/src/context.rs +++ b/crates/agent/src/context.rs @@ -20,7 +20,7 @@ use text::{Anchor, OffsetRangeExt as _}; use util::markdown::MarkdownCodeBlock; use util::{ResultExt as _, post_inc}; -pub const RULES_ICON: IconName = IconName::Context; +pub const RULES_ICON: IconName = IconName::Reader; pub enum ContextKind { File, @@ -40,8 +40,8 @@ impl ContextKind { ContextKind::File => IconName::File, ContextKind::Directory => IconName::Folder, ContextKind::Symbol => IconName::Code, - ContextKind::Selection => IconName::Context, - ContextKind::FetchedUrl => IconName::Globe, + ContextKind::Selection => IconName::Reader, + ContextKind::FetchedUrl => IconName::ToolWeb, ContextKind::Thread => IconName::Thread, ContextKind::TextThread => IconName::TextThread, ContextKind::Rules => RULES_ICON, @@ -201,24 +201,24 @@ impl FileContextHandle { parse_status.changed().await.log_err(); } - 
if let Ok(snapshot) = buffer.read_with(cx, |buffer, _| buffer.snapshot()) { - if let Some(outline) = snapshot.outline(None) { - let items = outline - .items - .into_iter() - .map(|item| item.to_point(&snapshot)); - - if let Ok(outline_text) = - outline::render_outline(items, None, 0, usize::MAX).await - { - let context = AgentContext::File(FileContext { - handle: self, - full_path, - text: outline_text.into(), - is_outline: true, - }); - return Some((context, vec![buffer])); - } + if let Ok(snapshot) = buffer.read_with(cx, |buffer, _| buffer.snapshot()) + && let Some(outline) = snapshot.outline(None) + { + let items = outline + .items + .into_iter() + .map(|item| item.to_point(&snapshot)); + + if let Ok(outline_text) = + outline::render_outline(items, None, 0, usize::MAX).await + { + let context = AgentContext::File(FileContext { + handle: self, + full_path, + text: outline_text.into(), + is_outline: true, + }); + return Some((context, vec![buffer])); } } } @@ -362,7 +362,7 @@ impl Display for DirectoryContext { let mut is_first = true; for descendant in &self.descendants { if !is_first { - write!(f, "\n")?; + writeln!(f)?; } else { is_first = false; } @@ -650,7 +650,7 @@ impl TextThreadContextHandle { impl Display for TextThreadContext { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { // TODO: escape title? - write!(f, "\n", self.title)?; + writeln!(f, "", self.title)?; write!(f, "{}", self.text.trim())?; write!(f, "\n") } @@ -716,7 +716,7 @@ impl RulesContextHandle { impl Display for RulesContext { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { if let Some(title) = &self.title { - write!(f, "Rules title: {}\n", title)?; + writeln!(f, "Rules title: {}", title)?; } let code_block = MarkdownCodeBlock { tag: "", diff --git a/crates/agent/src/context_server_tool.rs b/crates/agent/src/context_server_tool.rs index 85e8ac7451405a292af1679406f4d184fe709eb9..696c569356bca36adf54bc84ec52fa7295048b75 100644 --- a/crates/agent/src/context_server_tool.rs +++ b/crates/agent/src/context_server_tool.rs @@ -1,7 +1,8 @@ use std::sync::Arc; +use action_log::ActionLog; use anyhow::{Result, anyhow, bail}; -use assistant_tool::{ActionLog, Tool, ToolResult, ToolSource}; +use assistant_tool::{Tool, ToolResult, ToolSource}; use context_server::{ContextServerId, types}; use gpui::{AnyWindowHandle, App, Entity, Task}; use icons::IconName; @@ -85,15 +86,13 @@ impl Tool for ContextServerTool { ) -> ToolResult { if let Some(server) = self.store.read(cx).get_running_server(&self.server_id) { let tool_name = self.tool.name.clone(); - let server_clone = server.clone(); - let input_clone = input.clone(); cx.spawn(async move |_cx| { - let Some(protocol) = server_clone.client() else { + let Some(protocol) = server.client() else { bail!("Context server not initialized"); }; - let arguments = if let serde_json::Value::Object(map) = input_clone { + let arguments = if let serde_json::Value::Object(map) = input { Some(map.into_iter().collect()) } else { None diff --git a/crates/agent/src/context_store.rs b/crates/agent/src/context_store.rs index 60ba5527dcca22d81b7da62657c6abc00aa51607..b531852a184ffeaf86862990f03210ceb6033395 100644 --- a/crates/agent/src/context_store.rs +++ b/crates/agent/src/context_store.rs @@ -338,11 +338,9 @@ impl ContextStore { image_task, context_id: self.next_context_id.post_inc(), }); - if self.has_context(&context) { - if remove_if_exists { - self.remove_context(&context, cx); - return None; - } + if self.has_context(&context) && remove_if_exists { + 
self.remove_context(&context, cx); + return None; } self.insert_context(context.clone(), cx); diff --git a/crates/agent/src/history_store.rs b/crates/agent/src/history_store.rs index 89f75a72bd97054c2a6b233f0207accac956fcb5..8f4c1a1e2e6533b4760956c60bfc1a26123df92a 100644 --- a/crates/agent/src/history_store.rs +++ b/crates/agent/src/history_store.rs @@ -212,7 +212,16 @@ impl HistoryStore { fn load_recently_opened_entries(cx: &AsyncApp) -> Task>> { cx.background_spawn(async move { let path = paths::data_dir().join(NAVIGATION_HISTORY_PATH); - let contents = smol::fs::read_to_string(path).await?; + let contents = match smol::fs::read_to_string(path).await { + Ok(it) => it, + Err(e) if e.kind() == std::io::ErrorKind::NotFound => { + return Ok(Vec::new()); + } + Err(e) => { + return Err(e) + .context("deserializing persisted agent panel navigation history"); + } + }; let entries = serde_json::from_str::>(&contents) .context("deserializing persisted agent panel navigation history")? .into_iter() @@ -245,10 +254,9 @@ impl HistoryStore { } pub fn remove_recently_opened_thread(&mut self, id: ThreadId, cx: &mut Context) { - self.recently_opened_entries.retain(|entry| match entry { - HistoryEntryId::Thread(thread_id) if thread_id == &id => false, - _ => true, - }); + self.recently_opened_entries.retain( + |entry| !matches!(entry, HistoryEntryId::Thread(thread_id) if thread_id == &id), + ); self.save_recently_opened_entries(cx); } diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 048aa4245d7ae527b094b0485a8bec048427ccec..7b70fde56ab1e7acb6705aeace82f142dc28a9f3 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -8,14 +8,17 @@ use crate::{ }, tool_use::{PendingToolUse, ToolUse, ToolUseMetadata, ToolUseState}, }; -use agent_settings::{AgentProfileId, AgentSettings, CompletionMode, SUMMARIZE_THREAD_PROMPT}; +use action_log::ActionLog; +use agent_settings::{ + AgentProfileId, AgentSettings, CompletionMode, SUMMARIZE_THREAD_DETAILED_PROMPT, + SUMMARIZE_THREAD_PROMPT, +}; use anyhow::{Result, anyhow}; -use assistant_tool::{ActionLog, AnyToolCard, Tool, ToolWorkingSet}; +use assistant_tool::{AnyToolCard, Tool, ToolWorkingSet}; use chrono::{DateTime, Utc}; use client::{ModelRequestUsage, RequestUsage}; use cloud_llm_client::{CompletionIntent, CompletionRequestStatus, Plan, UsageLimit}; use collections::HashMap; -use feature_flags::{self, FeatureFlagAppExt}; use futures::{FutureExt, StreamExt as _, future::Shared}; use git::repository::DiffType; use gpui::{ @@ -107,7 +110,7 @@ impl std::fmt::Display for PromptId { } #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize)] -pub struct MessageId(pub(crate) usize); +pub struct MessageId(pub usize); impl MessageId { fn post_inc(&mut self) -> Self { @@ -178,7 +181,7 @@ impl Message { } } - pub fn to_string(&self) -> String { + pub fn to_message_content(&self) -> String { let mut result = String::new(); if !self.loaded_context.text.is_empty() { @@ -384,10 +387,8 @@ pub struct Thread { cumulative_token_usage: TokenUsage, exceeded_window_error: Option, tool_use_limit_reached: bool, - feedback: Option, retry_state: Option, message_feedback: HashMap, - last_auto_capture_at: Option, last_received_chunk_at: Option, request_callback: Option< Box])>, @@ -485,15 +486,13 @@ impl Thread { cumulative_token_usage: TokenUsage::default(), exceeded_window_error: None, tool_use_limit_reached: false, - feedback: None, retry_state: None, message_feedback: HashMap::default(), - last_auto_capture_at: 
None, last_error_context: None, last_received_chunk_at: None, request_callback: None, remaining_turns: u32::MAX, - configured_model: configured_model.clone(), + configured_model, profile: AgentProfile::new(profile_id, tools), } } @@ -531,7 +530,7 @@ impl Thread { .and_then(|model| { let model = SelectedModel { provider: model.provider.clone().into(), - model: model.model.clone().into(), + model: model.model.into(), }; registry.select_model(&model, cx) }) @@ -611,9 +610,7 @@ impl Thread { cumulative_token_usage: serialized.cumulative_token_usage, exceeded_window_error: None, tool_use_limit_reached: serialized.tool_use_limit_reached, - feedback: None, message_feedback: HashMap::default(), - last_auto_capture_at: None, last_error_context: None, last_received_chunk_at: None, request_callback: None, @@ -843,11 +840,17 @@ impl Thread { .await .unwrap_or(false); - if !equal { - this.update(cx, |this, cx| { - this.insert_checkpoint(pending_checkpoint, cx) - })?; - } + this.update(cx, |this, cx| { + this.pending_checkpoint = if equal { + Some(pending_checkpoint) + } else { + this.insert_checkpoint(pending_checkpoint, cx); + Some(ThreadCheckpoint { + message_id: this.next_message_id, + git_checkpoint: final_checkpoint, + }) + } + })?; Ok(()) } @@ -1026,8 +1029,6 @@ impl Thread { }); } - self.auto_capture_telemetry(cx); - message_id } @@ -1642,17 +1643,15 @@ impl Thread { }; self.tool_use - .request_tool_use(tool_message_id, tool_use, tool_use_metadata.clone(), cx); + .request_tool_use(tool_message_id, tool_use, tool_use_metadata, cx); - let pending_tool_use = self.tool_use.insert_tool_output( - tool_use_id.clone(), + self.tool_use.insert_tool_output( + tool_use_id, tool_name, tool_output, self.configured_model.as_ref(), self.completion_mode, - ); - - pending_tool_use + ) } pub fn stream_completion( @@ -1685,7 +1684,7 @@ impl Thread { self.last_received_chunk_at = Some(Instant::now()); let task = cx.spawn(async move |thread, cx| { - let stream_completion_future = model.stream_completion(request, &cx); + let stream_completion_future = model.stream_completion(request, cx); let initial_token_usage = thread.read_with(cx, |thread, _cx| thread.cumulative_token_usage); let stream_completion = async { @@ -1817,7 +1816,7 @@ impl Thread { let streamed_input = if tool_use.is_input_complete { None } else { - Some((&tool_use.input).clone()) + Some(tool_use.input.clone()) }; let ui_text = thread.tool_use.request_tool_use( @@ -1899,7 +1898,6 @@ impl Thread { cx.emit(ThreadEvent::StreamedCompletion); cx.notify(); - thread.auto_capture_telemetry(cx); Ok(()) })??; @@ -1967,11 +1965,9 @@ impl Thread { if let Some(prev_message) = thread.messages.get(ix - 1) - { - if prev_message.role == Role::Assistant { + && prev_message.role == Role::Assistant { break; } - } } } @@ -2044,7 +2040,7 @@ impl Thread { retry_scheduled = thread .handle_retryable_error_with_delay( - &completion_error, + completion_error, Some(retry_strategy), model.clone(), intent, @@ -2074,8 +2070,6 @@ impl Thread { request_callback(request, response_events); } - thread.auto_capture_telemetry(cx); - if let Ok(initial_usage) = initial_token_usage { let usage = thread.cumulative_token_usage - initial_usage; @@ -2123,7 +2117,7 @@ impl Thread { self.pending_summary = cx.spawn(async move |this, cx| { let result = async { - let mut messages = model.model.stream_completion(request, &cx).await?; + let mut messages = model.model.stream_completion(request, cx).await?; let mut new_summary = String::new(); while let Some(event) = messages.next().await { @@ -2267,6 
+2261,15 @@ impl Thread { max_attempts: 3, }) } + Other(err) + if err.is::() + || err.is::() => + { + // Retrying won't help for Payment Required or Model Request Limit errors (where + // the user must upgrade to usage-based billing to get more requests, or else wait + // for a significant amount of time for the request limit to reset). + None + } // Conservatively assume that any other errors are non-retryable HttpResponseError { .. } | Other(..) => Some(RetryStrategy::Fixed { delay: BASE_RETRY_DELAY, @@ -2422,12 +2425,10 @@ impl Thread { return; } - let added_user_message = include_str!("./prompts/summarize_thread_detailed_prompt.txt"); - let request = self.to_summarize_request( &model, CompletionIntent::ThreadContextSummarization, - added_user_message.into(), + SUMMARIZE_THREAD_DETAILED_PROMPT.into(), cx, ); @@ -2440,7 +2441,7 @@ impl Thread { // which result to prefer (the old task could complete after the new one, resulting in a // stale summary). self.detailed_summary_task = cx.spawn(async move |thread, cx| { - let stream = model.stream_completion_text(request, &cx); + let stream = model.stream_completion_text(request, cx); let Some(mut messages) = stream.await.log_err() else { thread .update(cx, |thread, _cx| { @@ -2469,13 +2470,13 @@ impl Thread { .ok()?; // Save thread so its summary can be reused later - if let Some(thread) = thread.upgrade() { - if let Ok(Ok(save_task)) = cx.update(|cx| { + if let Some(thread) = thread.upgrade() + && let Ok(Ok(save_task)) = cx.update(|cx| { thread_store .update(cx, |thread_store, cx| thread_store.save_thread(&thread, cx)) - }) { - save_task.await.log_err(); - } + }) + { + save_task.await.log_err(); } Some(()) @@ -2520,7 +2521,6 @@ impl Thread { model: Arc, cx: &mut Context, ) -> Vec { - self.auto_capture_telemetry(cx); let request = Arc::new(self.to_completion_request(model.clone(), CompletionIntent::ToolResults, cx)); let pending_tool_uses = self @@ -2724,13 +2724,11 @@ impl Thread { window: Option, cx: &mut Context, ) { - if self.all_tools_finished() { - if let Some(ConfiguredModel { model, .. }) = self.configured_model.as_ref() { - if !canceled { - self.send_to_model(model.clone(), CompletionIntent::ToolResults, window, cx); - } - self.auto_capture_telemetry(cx); - } + if self.all_tools_finished() + && let Some(ConfiguredModel { model, .. 
}) = self.configured_model.as_ref() + && !canceled + { + self.send_to_model(model.clone(), CompletionIntent::ToolResults, window, cx); } cx.emit(ThreadEvent::ToolFinished { @@ -2786,10 +2784,6 @@ impl Thread { cx.emit(ThreadEvent::CancelEditing); } - pub fn feedback(&self) -> Option { - self.feedback - } - pub fn message_feedback(&self, message_id: MessageId) -> Option { self.message_feedback.get(&message_id).copied() } @@ -2822,7 +2816,7 @@ impl Thread { let message_content = self .message(message_id) - .map(|msg| msg.to_string()) + .map(|msg| msg.to_message_content()) .unwrap_or_default(); cx.background_spawn(async move { @@ -2851,52 +2845,6 @@ impl Thread { }) } - pub fn report_feedback( - &mut self, - feedback: ThreadFeedback, - cx: &mut Context, - ) -> Task> { - let last_assistant_message_id = self - .messages - .iter() - .rev() - .find(|msg| msg.role == Role::Assistant) - .map(|msg| msg.id); - - if let Some(message_id) = last_assistant_message_id { - self.report_message_feedback(message_id, feedback, cx) - } else { - let final_project_snapshot = Self::project_snapshot(self.project.clone(), cx); - let serialized_thread = self.serialize(cx); - let thread_id = self.id().clone(); - let client = self.project.read(cx).client(); - self.feedback = Some(feedback); - cx.notify(); - - cx.background_spawn(async move { - let final_project_snapshot = final_project_snapshot.await; - let serialized_thread = serialized_thread.await?; - let thread_data = serde_json::to_value(serialized_thread) - .unwrap_or_else(|_| serde_json::Value::Null); - - let rating = match feedback { - ThreadFeedback::Positive => "positive", - ThreadFeedback::Negative => "negative", - }; - telemetry::event!( - "Assistant Thread Rated", - rating, - thread_id, - thread_data, - final_project_snapshot - ); - client.telemetry().flush_events().await; - - Ok(()) - }) - } - } - /// Create a snapshot of the current project state including git information and unsaved buffers. 
fn project_snapshot( project: Entity, @@ -2917,11 +2865,11 @@ impl Thread { let buffer_store = project.read(app_cx).buffer_store(); for buffer_handle in buffer_store.read(app_cx).buffers() { let buffer = buffer_handle.read(app_cx); - if buffer.is_dirty() { - if let Some(file) = buffer.file() { - let path = file.path().to_string_lossy().to_string(); - unsaved_buffers.push(path); - } + if buffer.is_dirty() + && let Some(file) = buffer.file() + { + let path = file.path().to_string_lossy().to_string(); + unsaved_buffers.push(path); } } }) @@ -3131,50 +3079,6 @@ impl Thread { &self.project } - pub fn auto_capture_telemetry(&mut self, cx: &mut Context) { - if !cx.has_flag::() { - return; - } - - let now = Instant::now(); - if let Some(last) = self.last_auto_capture_at { - if now.duration_since(last).as_secs() < 10 { - return; - } - } - - self.last_auto_capture_at = Some(now); - - let thread_id = self.id().clone(); - let github_login = self - .project - .read(cx) - .user_store() - .read(cx) - .current_user() - .map(|user| user.github_login.clone()); - let client = self.project.read(cx).client(); - let serialize_task = self.serialize(cx); - - cx.background_executor() - .spawn(async move { - if let Ok(serialized_thread) = serialize_task.await { - if let Ok(thread_data) = serde_json::to_value(serialized_thread) { - telemetry::event!( - "Agent Thread Auto-Captured", - thread_id = thread_id.to_string(), - thread_data = thread_data, - auto_capture_reason = "tracked_user", - github_login = github_login - ); - - client.telemetry().flush_events().await; - } - } - }) - .detach(); - } - pub fn cumulative_token_usage(&self) -> TokenUsage { self.cumulative_token_usage } @@ -3217,13 +3121,13 @@ impl Thread { .model .max_token_count_for_mode(self.completion_mode().into()); - if let Some(exceeded_error) = &self.exceeded_window_error { - if model.model.id() == exceeded_error.model_id { - return Some(TotalTokenUsage { - total: exceeded_error.token_count, - max, - }); - } + if let Some(exceeded_error) = &self.exceeded_window_error + && model.model.id() == exceeded_error.model_id + { + return Some(TotalTokenUsage { + total: exceeded_error.token_count, + max, + }); } let total = self @@ -3284,7 +3188,7 @@ impl Thread { self.configured_model.as_ref(), self.completion_mode, ); - self.tool_finished(tool_use_id.clone(), None, true, window, cx); + self.tool_finished(tool_use_id, None, true, window, cx); } } @@ -3916,7 +3820,7 @@ fn main() {{ AgentSettings { model_parameters: vec![LanguageModelParameters { provider: Some(model.provider_id().0.to_string().into()), - model: Some(model.id().0.clone()), + model: Some(model.id().0), temperature: Some(0.66), }], ..AgentSettings::get_global(cx).clone() @@ -3936,7 +3840,7 @@ fn main() {{ AgentSettings { model_parameters: vec![LanguageModelParameters { provider: None, - model: Some(model.id().0.clone()), + model: Some(model.id().0), temperature: Some(0.66), }], ..AgentSettings::get_global(cx).clone() @@ -3976,7 +3880,7 @@ fn main() {{ AgentSettings { model_parameters: vec![LanguageModelParameters { provider: Some("anthropic".into()), - model: Some(model.id().0.clone()), + model: Some(model.id().0), temperature: Some(0.66), }], ..AgentSettings::get_global(cx).clone() @@ -4027,7 +3931,7 @@ fn main() {{ }); let fake_model = model.as_fake(); - simulate_successful_response(&fake_model, cx); + simulate_successful_response(fake_model, cx); // Should start generating summary when there are >= 2 messages thread.read_with(cx, |thread, _| { @@ -4122,7 +4026,7 @@ fn main() {{ }); let 
fake_model = model.as_fake(); - simulate_successful_response(&fake_model, cx); + simulate_successful_response(fake_model, cx); thread.read_with(cx, |thread, _| { // State is still Error, not Generating @@ -5321,7 +5225,7 @@ fn main() {{ } #[gpui::test] - async fn test_retry_cancelled_on_stop(cx: &mut TestAppContext) { + async fn test_retry_canceled_on_stop(cx: &mut TestAppContext) { init_test_settings(cx); let project = create_test_project(cx, json!({})).await; @@ -5377,7 +5281,7 @@ fn main() {{ "Should have no pending completions after cancellation" ); - // Verify the retry was cancelled by checking retry state + // Verify the retry was canceled by checking retry state thread.read_with(cx, |thread, _| { if let Some(retry_state) = &thread.retry_state { panic!( @@ -5404,7 +5308,7 @@ fn main() {{ }); let fake_model = model.as_fake(); - simulate_successful_response(&fake_model, cx); + simulate_successful_response(fake_model, cx); thread.read_with(cx, |thread, _| { assert!(matches!(thread.summary(), ThreadSummary::Generating)); diff --git a/crates/agent/src/thread_store.rs b/crates/agent/src/thread_store.rs index cc7cb50c9195a6a9a5bd0e0e1a17bd76caf153ee..45e551dbdf01425f8ecd454e2808d1ea1cf94c51 100644 --- a/crates/agent/src/thread_store.rs +++ b/crates/agent/src/thread_store.rs @@ -42,7 +42,7 @@ use std::{ use util::ResultExt as _; pub static ZED_STATELESS: std::sync::LazyLock = - std::sync::LazyLock::new(|| std::env::var("ZED_STATELESS").map_or(false, |v| !v.is_empty())); + std::sync::LazyLock::new(|| std::env::var("ZED_STATELESS").is_ok_and(|v| !v.is_empty())); #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] pub enum DataType { @@ -74,7 +74,7 @@ impl Column for DataType { } } -const RULES_FILE_NAMES: [&'static str; 9] = [ +const RULES_FILE_NAMES: [&str; 9] = [ ".rules", ".cursorrules", ".windsurfrules", @@ -205,6 +205,22 @@ impl ThreadStore { (this, ready_rx) } + #[cfg(any(test, feature = "test-support"))] + pub fn fake(project: Entity, cx: &mut App) -> Self { + Self { + project, + tools: cx.new(|_| ToolWorkingSet::default()), + prompt_builder: Arc::new(PromptBuilder::new(None).unwrap()), + prompt_store: None, + context_server_tool_ids: HashMap::default(), + threads: Vec::new(), + project_context: SharedProjectContext::default(), + reload_system_prompt_tx: mpsc::channel(0).0, + _reload_system_prompt_task: Task::ready(()), + _subscriptions: vec![], + } + } + fn handle_project_event( &mut self, _project: Entity, @@ -565,33 +581,32 @@ impl ThreadStore { return; }; - if protocol.capable(context_server::protocol::ServerCapability::Tools) { - if let Some(response) = protocol + if protocol.capable(context_server::protocol::ServerCapability::Tools) + && let Some(response) = protocol .request::(()) .await .log_err() - { - let tool_ids = tool_working_set - .update(cx, |tool_working_set, cx| { - tool_working_set.extend( - response.tools.into_iter().map(|tool| { - Arc::new(ContextServerTool::new( - context_server_store.clone(), - server.id(), - tool, - )) as Arc - }), - cx, - ) - }) - .log_err(); - - if let Some(tool_ids) = tool_ids { - this.update(cx, |this, _| { - this.context_server_tool_ids.insert(server_id, tool_ids); - }) - .log_err(); - } + { + let tool_ids = tool_working_set + .update(cx, |tool_working_set, cx| { + tool_working_set.extend( + response.tools.into_iter().map(|tool| { + Arc::new(ContextServerTool::new( + context_server_store.clone(), + server.id(), + tool, + )) as Arc + }), + cx, + ) + }) + .log_err(); + + if let Some(tool_ids) = tool_ids { + this.update(cx, |this, _| 
{ + this.context_server_tool_ids.insert(server_id, tool_ids); + }) + .log_err(); } } }) @@ -681,13 +696,14 @@ impl SerializedThreadV0_1_0 { let mut messages: Vec = Vec::with_capacity(self.0.messages.len()); for message in self.0.messages { - if message.role == Role::User && !message.tool_results.is_empty() { - if let Some(last_message) = messages.last_mut() { - debug_assert!(last_message.role == Role::Assistant); - - last_message.tool_results = message.tool_results; - continue; - } + if message.role == Role::User + && !message.tool_results.is_empty() + && let Some(last_message) = messages.last_mut() + { + debug_assert!(last_message.role == Role::Assistant); + + last_message.tool_results = message.tool_results; + continue; } messages.push(message); @@ -877,7 +893,7 @@ impl ThreadsDatabase { let needs_migration_from_heed = mdb_path.exists(); - let connection = if *ZED_STATELESS { + let connection = if *ZED_STATELESS || cfg!(any(feature = "test-support", test)) { Connection::open_memory(Some("THREAD_FALLBACK_DB")) } else { Connection::open_file(&sqlite_path.to_string_lossy()) diff --git a/crates/agent/src/tool_use.rs b/crates/agent/src/tool_use.rs index 7392c0878d17adf8038292b10a7a8c349d3ec4e8..962dca591fb66f4679d44b8e8a4733c879bc2e0c 100644 --- a/crates/agent/src/tool_use.rs +++ b/crates/agent/src/tool_use.rs @@ -112,19 +112,13 @@ impl ToolUseState { }, ); - if let Some(window) = &mut window { - if let Some(tool) = this.tools.read(cx).tool(tool_use, cx) { - if let Some(output) = tool_result.output.clone() { - if let Some(card) = tool.deserialize_card( - output, - project.clone(), - window, - cx, - ) { - this.tool_result_cards.insert(tool_use_id, card); - } - } - } + if let Some(window) = &mut window + && let Some(tool) = this.tools.read(cx).tool(tool_use, cx) + && let Some(output) = tool_result.output.clone() + && let Some(card) = + tool.deserialize_card(output, project.clone(), window, cx) + { + this.tool_result_cards.insert(tool_use_id, card); } } } @@ -137,7 +131,7 @@ impl ToolUseState { } pub fn cancel_pending(&mut self) -> Vec { - let mut cancelled_tool_uses = Vec::new(); + let mut canceled_tool_uses = Vec::new(); self.pending_tool_uses_by_id .retain(|tool_use_id, tool_use| { if matches!(tool_use.status, PendingToolUseStatus::Error { .. 
}) { @@ -155,10 +149,10 @@ impl ToolUseState { is_error: true, }, ); - cancelled_tool_uses.push(tool_use.clone()); + canceled_tool_uses.push(tool_use.clone()); false }); - cancelled_tool_uses + canceled_tool_uses } pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> { @@ -281,7 +275,7 @@ impl ToolUseState { pub fn message_has_tool_results(&self, assistant_message_id: MessageId) -> bool { self.tool_uses_by_assistant_message .get(&assistant_message_id) - .map_or(false, |results| !results.is_empty()) + .is_some_and(|results| !results.is_empty()) } pub fn tool_result( diff --git a/crates/agent2/Cargo.toml b/crates/agent2/Cargo.toml index a75011a67161a3523dfbaf6be716ac110b200543..8dd79062f89eee6eb6525f8a699612b5ffbf248a 100644 --- a/crates/agent2/Cargo.toml +++ b/crates/agent2/Cargo.toml @@ -1,34 +1,54 @@ [package] name = "agent2" version = "0.1.0" -edition = "2021" +edition.workspace = true +publish.workspace = true license = "GPL-3.0-or-later" -publish = false [lib] path = "src/agent2.rs" +[features] +test-support = ["db/test-support"] +e2e = [] + [lints] workspace = true [dependencies] acp_thread.workspace = true +action_log.workspace = true +agent.workspace = true agent-client-protocol.workspace = true agent_servers.workspace = true +agent_settings.workspace = true anyhow.workspace = true +assistant_context.workspace = true assistant_tool.workspace = true +assistant_tools.workspace = true +chrono.workspace = true +client.workspace = true cloud_llm_client.workspace = true collections.workspace = true +context_server.workspace = true +db.workspace = true fs.workspace = true futures.workspace = true +git.workspace = true gpui.workspace = true handlebars = { workspace = true, features = ["rust-embed"] } +html_to_markdown.workspace = true +http_client.workspace = true indoc.workspace = true itertools.workspace = true language.workspace = true language_model.workspace = true language_models.workspace = true log.workspace = true +open.workspace = true +parking_lot.workspace = true +paths.workspace = true +portable-pty.workspace = true project.workspace = true prompt_store.workspace = true rust-embed.workspace = true @@ -37,24 +57,46 @@ serde.workspace = true serde_json.workspace = true settings.workspace = true smol.workspace = true +sqlez.workspace = true +task.workspace = true +telemetry.workspace = true +terminal.workspace = true +text.workspace = true ui.workspace = true util.workspace = true uuid.workspace = true watch.workspace = true +web_search.workspace = true +which.workspace = true workspace-hack.workspace = true +zstd.workspace = true [dev-dependencies] +agent = { workspace = true, "features" = ["test-support"] } +agent_servers = { workspace = true, "features" = ["test-support"] } +assistant_context = { workspace = true, "features" = ["test-support"] } ctor.workspace = true client = { workspace = true, "features" = ["test-support"] } clock = { workspace = true, "features" = ["test-support"] } +context_server = { workspace = true, "features" = ["test-support"] } +db = { workspace = true, "features" = ["test-support"] } +editor = { workspace = true, "features" = ["test-support"] } env_logger.workspace = true fs = { workspace = true, "features" = ["test-support"] } +git = { workspace = true, "features" = ["test-support"] } gpui = { workspace = true, "features" = ["test-support"] } gpui_tokio.workspace = true language = { workspace = true, "features" = ["test-support"] } language_model = { workspace = true, "features" = ["test-support"] } +lsp = { workspace = true, "features" = 
["test-support"] } +pretty_assertions.workspace = true project = { workspace = true, "features" = ["test-support"] } reqwest_client.workspace = true settings = { workspace = true, "features" = ["test-support"] } +tempfile.workspace = true +terminal = { workspace = true, "features" = ["test-support"] } +theme = { workspace = true, "features" = ["test-support"] } +tree-sitter-rust.workspace = true +unindent = { workspace = true } worktree = { workspace = true, "features" = ["test-support"] } -pretty_assertions.workspace = true +zlog.workspace = true diff --git a/crates/agent2/src/agent.rs b/crates/agent2/src/agent.rs index 2014d86fb7b49cf508c5e89eecc5a3cb43a71d7e..d5bc0fea63018c4023ebc94aee4d6d082a88c4cb 100644 --- a/crates/agent2/src/agent.rs +++ b/crates/agent2/src/agent.rs @@ -1,25 +1,34 @@ -use crate::{templates::Templates, AgentResponseEvent, Thread}; -use crate::{FindPathTool, ReadFileTool, ThinkingTool, ToolCallAuthorization}; -use acp_thread::ModelSelector; +use crate::{ + ContextServerRegistry, Thread, ThreadEvent, ThreadsDatabase, ToolCallAuthorization, + UserMessageContent, templates::Templates, +}; +use crate::{HistoryStore, TokenUsageUpdated}; +use acp_thread::{AcpThread, AgentModelSelector}; +use action_log::ActionLog; use agent_client_protocol as acp; -use anyhow::{anyhow, Context as _, Result}; -use futures::{future, StreamExt}; +use agent_settings::AgentSettings; +use anyhow::{Context as _, Result, anyhow}; +use collections::{HashSet, IndexMap}; +use fs::Fs; +use futures::channel::mpsc; +use futures::{StreamExt, future}; use gpui::{ App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity, }; -use language_model::{LanguageModel, LanguageModelRegistry}; +use language_model::{LanguageModel, LanguageModelProvider, LanguageModelRegistry}; use project::{Project, ProjectItem, ProjectPath, Worktree}; use prompt_store::{ ProjectContext, PromptId, PromptStore, RulesFileContext, UserRulesContext, WorktreeContext, }; -use std::cell::RefCell; +use settings::update_settings_file; +use std::any::Any; use std::collections::HashMap; use std::path::Path; use std::rc::Rc; use std::sync::Arc; use util::ResultExt; -const RULES_FILE_NAMES: [&'static str; 9] = [ +const RULES_FILE_NAMES: [&str; 9] = [ ".rules", ".cursorrules", ".windsurfrules", @@ -41,28 +50,134 @@ struct Session { thread: Entity, /// The ACP thread that handles protocol communication acp_thread: WeakEntity, - _subscription: Subscription, + pending_save: Task<()>, + _subscriptions: Vec, +} + +pub struct LanguageModels { + /// Access language model by ID + models: HashMap>, + /// Cached list for returning language model information + model_list: acp_thread::AgentModelList, + refresh_models_rx: watch::Receiver<()>, + refresh_models_tx: watch::Sender<()>, +} + +impl LanguageModels { + fn new(cx: &App) -> Self { + let (refresh_models_tx, refresh_models_rx) = watch::channel(()); + let mut this = Self { + models: HashMap::default(), + model_list: acp_thread::AgentModelList::Grouped(IndexMap::default()), + refresh_models_rx, + refresh_models_tx, + }; + this.refresh_list(cx); + this + } + + fn refresh_list(&mut self, cx: &App) { + let providers = LanguageModelRegistry::global(cx) + .read(cx) + .providers() + .into_iter() + .filter(|provider| provider.is_authenticated(cx)) + .collect::>(); + + let mut language_model_list = IndexMap::default(); + let mut recommended_models = HashSet::default(); + + let mut recommended = Vec::new(); + for provider in &providers { + for model in provider.recommended_models(cx) 
{ + recommended_models.insert(model.id()); + recommended.push(Self::map_language_model_to_info(&model, provider)); + } + } + if !recommended.is_empty() { + language_model_list.insert( + acp_thread::AgentModelGroupName("Recommended".into()), + recommended, + ); + } + + let mut models = HashMap::default(); + for provider in providers { + let mut provider_models = Vec::new(); + for model in provider.provided_models(cx) { + let model_info = Self::map_language_model_to_info(&model, &provider); + let model_id = model_info.id.clone(); + if !recommended_models.contains(&model.id()) { + provider_models.push(model_info); + } + models.insert(model_id, model); + } + if !provider_models.is_empty() { + language_model_list.insert( + acp_thread::AgentModelGroupName(provider.name().0.clone()), + provider_models, + ); + } + } + + self.models = models; + self.model_list = acp_thread::AgentModelList::Grouped(language_model_list); + self.refresh_models_tx.send(()).ok(); + } + + fn watch(&self) -> watch::Receiver<()> { + self.refresh_models_rx.clone() + } + + pub fn model_from_id( + &self, + model_id: &acp_thread::AgentModelId, + ) -> Option> { + self.models.get(model_id).cloned() + } + + fn map_language_model_to_info( + model: &Arc, + provider: &Arc, + ) -> acp_thread::AgentModelInfo { + acp_thread::AgentModelInfo { + id: Self::model_id(model), + name: model.name().0, + icon: Some(provider.icon()), + } + } + + fn model_id(model: &Arc) -> acp_thread::AgentModelId { + acp_thread::AgentModelId(format!("{}/{}", model.provider_id().0, model.id().0).into()) + } } pub struct NativeAgent { /// Session ID -> Session mapping sessions: HashMap, + history: Entity, /// Shared project context for all threads - project_context: Rc>, + project_context: Entity, project_context_needs_refresh: watch::Sender<()>, _maintain_project_context: Task>, + context_server_registry: Entity, /// Shared templates for all threads templates: Arc, + /// Cached model information + models: LanguageModels, project: Entity, prompt_store: Option>, + fs: Arc, _subscriptions: Vec, } impl NativeAgent { pub async fn new( project: Entity, + history: Entity, templates: Arc, prompt_store: Option>, + fs: Arc, cx: &mut AsyncApp, ) -> Result> { log::info!("Creating new NativeAgent"); @@ -72,7 +187,13 @@ impl NativeAgent { .await; cx.new(|cx| { - let mut subscriptions = vec![cx.subscribe(&project, Self::handle_project_event)]; + let mut subscriptions = vec![ + cx.subscribe(&project, Self::handle_project_event), + cx.subscribe( + &LanguageModelRegistry::global(cx), + Self::handle_models_updated_event, + ), + ]; if let Some(prompt_store) = prompt_store.as_ref() { subscriptions.push(cx.subscribe(prompt_store, Self::handle_prompts_updated_event)) } @@ -81,19 +202,79 @@ impl NativeAgent { watch::channel(()); Self { sessions: HashMap::new(), - project_context: Rc::new(RefCell::new(project_context)), + history, + project_context: cx.new(|_| project_context), project_context_needs_refresh: project_context_needs_refresh_tx, _maintain_project_context: cx.spawn(async move |this, cx| { Self::maintain_project_context(this, project_context_needs_refresh_rx, cx).await }), + context_server_registry: cx.new(|cx| { + ContextServerRegistry::new(project.read(cx).context_server_store(), cx) + }), templates, + models: LanguageModels::new(cx), project, prompt_store, + fs, _subscriptions: subscriptions, } }) } + fn register_session( + &mut self, + thread_handle: Entity, + cx: &mut Context, + ) -> Entity { + let connection = Rc::new(NativeAgentConnection(cx.entity())); + let registry 
= LanguageModelRegistry::read_global(cx); + let summarization_model = registry.thread_summary_model().map(|c| c.model); + + thread_handle.update(cx, |thread, cx| { + thread.set_summarization_model(summarization_model, cx); + thread.add_default_tools(cx) + }); + + let thread = thread_handle.read(cx); + let session_id = thread.id().clone(); + let title = thread.title(); + let project = thread.project.clone(); + let action_log = thread.action_log.clone(); + let acp_thread = cx.new(|_cx| { + acp_thread::AcpThread::new( + title, + connection, + project.clone(), + action_log.clone(), + session_id.clone(), + ) + }); + let subscriptions = vec![ + cx.observe_release(&acp_thread, |this, acp_thread, _cx| { + this.sessions.remove(acp_thread.session_id()); + }), + cx.subscribe(&thread_handle, Self::handle_thread_token_usage_updated), + cx.observe(&thread_handle, move |this, thread, cx| { + this.save_thread(thread, cx) + }), + ]; + + self.sessions.insert( + session_id, + Session { + thread: thread_handle, + acp_thread: acp_thread.downgrade(), + _subscriptions: subscriptions, + pending_save: Task::ready(()), + }, + ); + acp_thread + } + + pub fn models(&self) -> &LanguageModels { + &self.models + } + async fn maintain_project_context( this: WeakEntity, mut needs_refresh: watch::Receiver<()>, @@ -105,7 +286,9 @@ impl NativeAgent { Self::build_project_context(&this.project, this.prompt_store.as_ref(), cx) })? .await; - this.update(cx, |this, _| this.project_context.replace(project_context))?; + this.update(cx, |this, cx| { + this.project_context = cx.new(|_| project_context); + })?; } Ok(()) @@ -258,6 +441,23 @@ impl NativeAgent { }) } + fn handle_thread_token_usage_updated( + &mut self, + thread: Entity, + usage: &TokenUsageUpdated, + cx: &mut Context, + ) { + let Some(session) = self.sessions.get(thread.read(cx).id()) else { + return; + }; + session + .acp_thread + .update(cx, |acp_thread, cx| { + acp_thread.update_token_usage(usage.0.clone(), cx); + }) + .ok(); + } + fn handle_project_event( &mut self, _project: Entity, @@ -289,244 +489,202 @@ impl NativeAgent { ) { self.project_context_needs_refresh.send(()).ok(); } -} - -/// Wrapper struct that implements the AgentConnection trait -#[derive(Clone)] -pub struct NativeAgentConnection(pub Entity); -impl ModelSelector for NativeAgentConnection { - fn list_models(&self, cx: &mut AsyncApp) -> Task>>> { - log::debug!("NativeAgentConnection::list_models called"); - cx.spawn(async move |cx| { - cx.update(|cx| { - let registry = LanguageModelRegistry::read_global(cx); - let models = registry.available_models(cx).collect::>(); - log::info!("Found {} available models", models.len()); - if models.is_empty() { - Err(anyhow::anyhow!("No models available")) - } else { - Ok(models) - } - })? 
- }) - } + fn handle_models_updated_event( + &mut self, + _registry: Entity, + _event: &language_model::Event, + cx: &mut Context, + ) { + self.models.refresh_list(cx); - fn select_model( - &self, - session_id: acp::SessionId, - model: Arc, - cx: &mut AsyncApp, - ) -> Task> { - log::info!( - "Setting model for session {}: {:?}", - session_id, - model.name() - ); - let agent = self.0.clone(); + let registry = LanguageModelRegistry::read_global(cx); + let default_model = registry.default_model().map(|m| m.model); + let summarization_model = registry.thread_summary_model().map(|m| m.model); - cx.spawn(async move |cx| { - agent.update(cx, |agent, cx| { - if let Some(session) = agent.sessions.get(&session_id) { - session.thread.update(cx, |thread, _cx| { - thread.selected_model = model; - }); - Ok(()) - } else { - Err(anyhow!("Session not found")) + for session in self.sessions.values_mut() { + session.thread.update(cx, |thread, cx| { + if thread.model().is_none() + && let Some(model) = default_model.clone() + { + thread.set_model(model, cx); + cx.notify(); } - })? - }) - } - - fn selected_model( - &self, - session_id: &acp::SessionId, - cx: &mut AsyncApp, - ) -> Task>> { - let agent = self.0.clone(); - let session_id = session_id.clone(); - cx.spawn(async move |cx| { - let thread = agent - .read_with(cx, |agent, _| { - agent - .sessions - .get(&session_id) - .map(|session| session.thread.clone()) - })? - .ok_or_else(|| anyhow::anyhow!("Session not found"))?; - let selected = thread.read_with(cx, |thread, _| thread.selected_model.clone())?; - Ok(selected) - }) + thread.set_summarization_model(summarization_model.clone(), cx); + }); + } } -} - -impl acp_thread::AgentConnection for NativeAgentConnection { - fn new_thread( - self: Rc, - project: Entity, - cwd: &Path, - cx: &mut AsyncApp, - ) -> Task>> { - let agent = self.0.clone(); - log::info!("Creating new thread for project at: {:?}", cwd); - - cx.spawn(async move |cx| { - log::debug!("Starting thread creation in async context"); - // Generate session ID - let session_id = acp::SessionId(uuid::Uuid::new_v4().to_string().into()); - log::info!("Created session with ID: {}", session_id); + pub fn open_thread( + &mut self, + id: acp::SessionId, + cx: &mut Context, + ) -> Task>> { + let database_future = ThreadsDatabase::connect(cx); + cx.spawn(async move |this, cx| { + let database = database_future.await.map_err(|err| anyhow!(err))?; + let db_thread = database + .load_thread(id.clone()) + .await? 
+ .with_context(|| format!("no thread found with ID: {id:?}"))?; - // Create AcpThread - let acp_thread = cx.update(|cx| { + let thread = this.update(cx, |this, cx| { + let action_log = cx.new(|_cx| ActionLog::new(this.project.clone())); cx.new(|cx| { - acp_thread::AcpThread::new("agent2", self.clone(), project.clone(), session_id.clone(), cx) + Thread::from_db( + id.clone(), + db_thread, + this.project.clone(), + this.project_context.clone(), + this.context_server_registry.clone(), + action_log.clone(), + this.templates.clone(), + cx, + ) }) })?; - let action_log = cx.update(|cx| acp_thread.read(cx).action_log().clone())?; - - // Create Thread - let thread = agent.update( - cx, - |agent, cx: &mut gpui::Context| -> Result<_> { - // Fetch default model from registry settings - let registry = LanguageModelRegistry::read_global(cx); - - // Log available models for debugging - let available_count = registry.available_models(cx).count(); - log::debug!("Total available models: {}", available_count); - - let default_model = registry - .default_model() - .map(|configured| { - log::info!( - "Using configured default model: {:?} from provider: {:?}", - configured.model.name(), - configured.provider.name() - ); - configured.model - }) - .ok_or_else(|| { - log::warn!("No default model configured in settings"); - anyhow!("No default model configured. Please configure a default model in settings.") - })?; - - let thread = cx.new(|_| { - let mut thread = Thread::new(project.clone(), agent.project_context.clone(), action_log.clone(), agent.templates.clone(), default_model); - thread.add_tool(ThinkingTool); - thread.add_tool(FindPathTool::new(project.clone())); - thread.add_tool(ReadFileTool::new(project.clone(), action_log)); - thread - }); - - Ok(thread) - }, - )??; - - // Store the session - agent.update(cx, |agent, cx| { - agent.sessions.insert( - session_id, - Session { - thread, - acp_thread: acp_thread.downgrade(), - _subscription: cx.observe_release(&acp_thread, |this, acp_thread, _cx| { - this.sessions.remove(acp_thread.session_id()); - }) - }, - ); - })?; - + let acp_thread = + this.update(cx, |this, cx| this.register_session(thread.clone(), cx))?; + let events = thread.update(cx, |thread, cx| thread.replay(cx))?; + cx.update(|cx| { + NativeAgentConnection::handle_thread_events(events, acp_thread.downgrade(), cx) + })? + .await?; Ok(acp_thread) }) } - fn auth_methods(&self) -> &[acp::AuthMethod] { - &[] // No auth for in-process + pub fn thread_summary( + &mut self, + id: acp::SessionId, + cx: &mut Context, + ) -> Task> { + let thread = self.open_thread(id.clone(), cx); + cx.spawn(async move |this, cx| { + let acp_thread = thread.await?; + let result = this + .update(cx, |this, cx| { + this.sessions + .get(&id) + .unwrap() + .thread + .update(cx, |thread, cx| thread.summary(cx)) + })? 
+ .await?; + drop(acp_thread); + Ok(result) + }) } - fn authenticate(&self, _method: acp::AuthMethodId, _cx: &mut App) -> Task> { - Task::ready(Ok(())) + fn save_thread(&mut self, thread: Entity, cx: &mut Context) { + if thread.read(cx).is_empty() { + return; + } + + let database_future = ThreadsDatabase::connect(cx); + let (id, db_thread) = + thread.update(cx, |thread, cx| (thread.id().clone(), thread.to_db(cx))); + let Some(session) = self.sessions.get_mut(&id) else { + return; + }; + let history = self.history.clone(); + session.pending_save = cx.spawn(async move |_, cx| { + let Some(database) = database_future.await.map_err(|err| anyhow!(err)).log_err() else { + return; + }; + let db_thread = db_thread.await; + database.save_thread(id, db_thread).await.log_err(); + history.update(cx, |history, cx| history.reload(cx)).ok(); + }); } +} - fn model_selector(&self) -> Option> { - Some(Rc::new(self.clone()) as Rc) +/// Wrapper struct that implements the AgentConnection trait +#[derive(Clone)] +pub struct NativeAgentConnection(pub Entity); + +impl NativeAgentConnection { + pub fn thread(&self, session_id: &acp::SessionId, cx: &App) -> Option> { + self.0 + .read(cx) + .sessions + .get(session_id) + .map(|session| session.thread.clone()) } - fn prompt( + fn run_turn( &self, - params: acp::PromptRequest, + session_id: acp::SessionId, cx: &mut App, + f: impl 'static + + FnOnce(Entity, &mut App) -> Result>>, ) -> Task> { - let session_id = params.session_id.clone(); - let agent = self.0.clone(); - log::info!("Received prompt request for session: {}", session_id); - log::debug!("Prompt blocks count: {}", params.prompt.len()); + let Some((thread, acp_thread)) = self.0.update(cx, |agent, _cx| { + agent + .sessions + .get_mut(&session_id) + .map(|s| (s.thread.clone(), s.acp_thread.clone())) + }) else { + return Task::ready(Err(anyhow!("Session not found"))); + }; + log::debug!("Found session for: {}", session_id); - cx.spawn(async move |cx| { - // Get session - let (thread, acp_thread) = agent - .update(cx, |agent, _| { - agent - .sessions - .get_mut(&session_id) - .map(|s| (s.thread.clone(), s.acp_thread.clone())) - })? 
- .ok_or_else(|| { - log::error!("Session not found: {}", session_id); - anyhow::anyhow!("Session not found") - })?; - log::debug!("Found session for: {}", session_id); - - // Convert prompt to message - let message = convert_prompt_to_message(params.prompt); - log::info!("Converted prompt to message: {} chars", message.len()); - log::debug!("Message content: {}", message); - - // Get model using the ModelSelector capability (always available for agent2) - // Get the selected model from the thread directly - let model = thread.read_with(cx, |thread, _| thread.selected_model.clone())?; - - // Send to thread - log::info!("Sending message to thread with model: {:?}", model.name()); - let mut response_stream = - thread.update(cx, |thread, cx| thread.send(model, message, cx))?; + let response_stream = match f(thread, cx) { + Ok(stream) => stream, + Err(err) => return Task::ready(Err(err)), + }; + Self::handle_thread_events(response_stream, acp_thread, cx) + } + fn handle_thread_events( + mut events: mpsc::UnboundedReceiver>, + acp_thread: WeakEntity, + cx: &App, + ) -> Task> { + cx.spawn(async move |cx| { // Handle response stream and forward to session.acp_thread - while let Some(result) = response_stream.next().await { + while let Some(result) = events.next().await { match result { Ok(event) => { log::trace!("Received completion event: {:?}", event); match event { - AgentResponseEvent::Text(text) => { + ThreadEvent::UserMessage(message) => { + acp_thread.update(cx, |thread, cx| { + for content in message.content { + thread.push_user_content_block( + Some(message.id.clone()), + content.into(), + cx, + ); + } + })?; + } + ThreadEvent::AgentText(text) => { acp_thread.update(cx, |thread, cx| { - thread.handle_session_update( - acp::SessionUpdate::AgentMessageChunk { - content: acp::ContentBlock::Text(acp::TextContent { - text, - annotations: None, - }), - }, + thread.push_assistant_content_block( + acp::ContentBlock::Text(acp::TextContent { + text, + annotations: None, + }), + false, cx, ) - })??; + })?; } - AgentResponseEvent::Thinking(text) => { + ThreadEvent::AgentThinking(text) => { acp_thread.update(cx, |thread, cx| { - thread.handle_session_update( - acp::SessionUpdate::AgentThoughtChunk { - content: acp::ContentBlock::Text(acp::TextContent { - text, - annotations: None, - }), - }, + thread.push_assistant_content_block( + acp::ContentBlock::Text(acp::TextContent { + text, + annotations: None, + }), + true, cx, ) - })??; + })?; } - AgentResponseEvent::ToolCallAuthorization(ToolCallAuthorization { + ThreadEvent::ToolCallAuthorization(ToolCallAuthorization { tool_call, options, response, @@ -535,10 +693,11 @@ impl acp_thread::AgentConnection for NativeAgentConnection { thread.request_tool_call_authorization(tool_call, options, cx) })?; cx.background_spawn(async move { - if let Some(option) = recv - .await - .context("authorization sender was dropped") - .log_err() + if let Some(recv) = recv.log_err() + && let Some(option) = recv + .await + .context("authorization sender was dropped") + .log_err() { response .send(option) @@ -548,23 +707,26 @@ impl acp_thread::AgentConnection for NativeAgentConnection { }) .detach(); } - AgentResponseEvent::ToolCall(tool_call) => { + ThreadEvent::ToolCall(tool_call) => { acp_thread.update(cx, |thread, cx| { - thread.handle_session_update( - acp::SessionUpdate::ToolCall(tool_call), - cx, - ) + thread.upsert_tool_call(tool_call, cx) })??; } - AgentResponseEvent::ToolCallUpdate(tool_call_update) => { + ThreadEvent::ToolCallUpdate(update) => { acp_thread.update(cx, 
|thread, cx| { - thread.handle_session_update( - acp::SessionUpdate::ToolCallUpdate(tool_call_update), - cx, - ) + thread.update_tool_call(update, cx) })??; } - AgentResponseEvent::Stop(stop_reason) => { + ThreadEvent::TitleUpdate(title) => { + acp_thread + .update(cx, |thread, cx| thread.update_title(title, cx))??; + } + ThreadEvent::Retry(status) => { + acp_thread.update(cx, |thread, cx| { + thread.update_retry_status(status, cx) + })?; + } + ThreadEvent::Stop(stop_reason) => { log::debug!("Assistant message complete: {:?}", stop_reason); return Ok(acp::PromptResponse { stop_reason }); } @@ -572,8 +734,7 @@ impl acp_thread::AgentConnection for NativeAgentConnection { } Err(e) => { log::error!("Error in model response stream: {:?}", e); - // TODO: Consider sending an error message to the UI - break; + return Err(e); } } } @@ -584,57 +745,300 @@ impl acp_thread::AgentConnection for NativeAgentConnection { }) }) } +} + +impl AgentModelSelector for NativeAgentConnection { + fn list_models(&self, cx: &mut App) -> Task> { + log::debug!("NativeAgentConnection::list_models called"); + let list = self.0.read(cx).models.model_list.clone(); + Task::ready(if list.is_empty() { + Err(anyhow::anyhow!("No models available")) + } else { + Ok(list) + }) + } + + fn select_model( + &self, + session_id: acp::SessionId, + model_id: acp_thread::AgentModelId, + cx: &mut App, + ) -> Task> { + log::info!("Setting model for session {}: {}", session_id, model_id); + let Some(thread) = self + .0 + .read(cx) + .sessions + .get(&session_id) + .map(|session| session.thread.clone()) + else { + return Task::ready(Err(anyhow!("Session not found"))); + }; + + let Some(model) = self.0.read(cx).models.model_from_id(&model_id) else { + return Task::ready(Err(anyhow!("Invalid model ID {}", model_id))); + }; + + thread.update(cx, |thread, cx| { + thread.set_model(model.clone(), cx); + }); + + update_settings_file::( + self.0.read(cx).fs.clone(), + cx, + move |settings, _cx| { + settings.set_model(model); + }, + ); + + Task::ready(Ok(())) + } + + fn selected_model( + &self, + session_id: &acp::SessionId, + cx: &mut App, + ) -> Task> { + let session_id = session_id.clone(); + + let Some(thread) = self + .0 + .read(cx) + .sessions + .get(&session_id) + .map(|session| session.thread.clone()) + else { + return Task::ready(Err(anyhow!("Session not found"))); + }; + let Some(model) = thread.read(cx).model() else { + return Task::ready(Err(anyhow!("Model not found"))); + }; + let Some(provider) = LanguageModelRegistry::read_global(cx).provider(&model.provider_id()) + else { + return Task::ready(Err(anyhow!("Provider not found"))); + }; + Task::ready(Ok(LanguageModels::map_language_model_to_info( + model, &provider, + ))) + } + + fn watch(&self, cx: &mut App) -> watch::Receiver<()> { + self.0.read(cx).models.watch() + } +} + +impl acp_thread::AgentConnection for NativeAgentConnection { + fn new_thread( + self: Rc, + project: Entity, + cwd: &Path, + cx: &mut App, + ) -> Task>> { + let agent = self.0.clone(); + log::info!("Creating new thread for project at: {:?}", cwd); + + cx.spawn(async move |cx| { + log::debug!("Starting thread creation in async context"); + + let action_log = cx.new(|_cx| ActionLog::new(project.clone()))?; + // Create Thread + let thread = agent.update( + cx, + |agent, cx: &mut gpui::Context| -> Result<_> { + // Fetch default model from registry settings + let registry = LanguageModelRegistry::read_global(cx); + // Log available models for debugging + let available_count = registry.available_models(cx).count(); + 
log::debug!("Total available models: {}", available_count); + + let default_model = registry.default_model().and_then(|default_model| { + agent + .models + .model_from_id(&LanguageModels::model_id(&default_model.model)) + }); + + let thread = cx.new(|cx| { + Thread::new( + project.clone(), + agent.project_context.clone(), + agent.context_server_registry.clone(), + action_log.clone(), + agent.templates.clone(), + default_model, + cx, + ) + }); + + Ok(thread) + }, + )??; + agent.update(cx, |agent, cx| agent.register_session(thread, cx)) + }) + } + + fn auth_methods(&self) -> &[acp::AuthMethod] { + &[] // No auth for in-process + } + + fn authenticate(&self, _method: acp::AuthMethodId, _cx: &mut App) -> Task> { + Task::ready(Ok(())) + } + + fn model_selector(&self) -> Option> { + Some(Rc::new(self.clone()) as Rc) + } + + fn prompt( + &self, + id: Option, + params: acp::PromptRequest, + cx: &mut App, + ) -> Task> { + let id = id.expect("UserMessageId is required"); + let session_id = params.session_id.clone(); + log::info!("Received prompt request for session: {}", session_id); + log::debug!("Prompt blocks count: {}", params.prompt.len()); + + self.run_turn(session_id, cx, |thread, cx| { + let content: Vec = params + .prompt + .into_iter() + .map(Into::into) + .collect::>(); + log::info!("Converted prompt to message: {} chars", content.len()); + log::debug!("Message id: {:?}", id); + log::debug!("Message content: {:?}", content); + + thread.update(cx, |thread, cx| thread.send(id, content, cx)) + }) + } + + fn prompt_capabilities(&self) -> acp::PromptCapabilities { + acp::PromptCapabilities { + image: true, + audio: false, + embedded_context: true, + } + } + + fn resume( + &self, + session_id: &acp::SessionId, + _cx: &mut App, + ) -> Option> { + Some(Rc::new(NativeAgentSessionResume { + connection: self.clone(), + session_id: session_id.clone(), + }) as _) + } fn cancel(&self, session_id: &acp::SessionId, cx: &mut App) { log::info!("Cancelling on session: {}", session_id); self.0.update(cx, |agent, cx| { if let Some(agent) = agent.sessions.get(session_id) { - agent.thread.update(cx, |thread, _cx| thread.cancel()); + agent.thread.update(cx, |thread, cx| thread.cancel(cx)); } }); } + + fn session_editor( + &self, + session_id: &agent_client_protocol::SessionId, + cx: &mut App, + ) -> Option> { + self.0.update(cx, |agent, _cx| { + agent.sessions.get(session_id).map(|session| { + Rc::new(NativeAgentSessionEditor { + thread: session.thread.clone(), + acp_thread: session.acp_thread.clone(), + }) as _ + }) + }) + } + + fn telemetry(&self) -> Option> { + Some(Rc::new(self.clone()) as Rc) + } + + fn into_any(self: Rc) -> Rc { + self + } } -/// Convert ACP content blocks to a message string -fn convert_prompt_to_message(blocks: Vec) -> String { - log::debug!("Converting {} content blocks to message", blocks.len()); - let mut message = String::new(); +impl acp_thread::AgentTelemetry for NativeAgentConnection { + fn agent_name(&self) -> String { + "Zed".into() + } - for block in blocks { - match block { - acp::ContentBlock::Text(text) => { - log::trace!("Processing text block: {} chars", text.text.len()); - message.push_str(&text.text); - } - acp::ContentBlock::ResourceLink(link) => { - log::trace!("Processing resource link: {}", link.uri); - message.push_str(&format!(" @{} ", link.uri)); - } - acp::ContentBlock::Image(_) => { - log::trace!("Processing image block"); - message.push_str(" [image] "); - } - acp::ContentBlock::Audio(_) => { - log::trace!("Processing audio block"); - message.push_str(" [audio] 
"); - } - acp::ContentBlock::Resource(resource) => { - log::trace!("Processing resource block: {:?}", resource.resource); - message.push_str(&format!(" [resource: {:?}] ", resource.resource)); + fn thread_data( + &self, + session_id: &acp::SessionId, + cx: &mut App, + ) -> Task> { + let Some(session) = self.0.read(cx).sessions.get(session_id) else { + return Task::ready(Err(anyhow!("Session not found"))); + }; + + let task = session.thread.read(cx).to_db(cx); + cx.background_spawn(async move { + serde_json::to_value(task.await).context("Failed to serialize thread") + }) + } +} + +struct NativeAgentSessionEditor { + thread: Entity, + acp_thread: WeakEntity, +} + +impl acp_thread::AgentSessionEditor for NativeAgentSessionEditor { + fn truncate(&self, message_id: acp_thread::UserMessageId, cx: &mut App) -> Task> { + match self.thread.update(cx, |thread, cx| { + thread.truncate(message_id.clone(), cx)?; + Ok(thread.latest_token_usage()) + }) { + Ok(usage) => { + self.acp_thread + .update(cx, |thread, cx| { + thread.update_token_usage(usage, cx); + }) + .ok(); + Task::ready(Ok(())) } + Err(error) => Task::ready(Err(error)), } } +} - message +struct NativeAgentSessionResume { + connection: NativeAgentConnection, + session_id: acp::SessionId, +} + +impl acp_thread::AgentSessionResume for NativeAgentSessionResume { + fn run(&self, cx: &mut App) -> Task> { + self.connection + .run_turn(self.session_id.clone(), cx, |thread, cx| { + thread.update(cx, |thread, cx| thread.resume(cx)) + }) + } } #[cfg(test)] mod tests { + use crate::HistoryEntryId; + use super::*; + use acp_thread::{ + AgentConnection, AgentModelGroupName, AgentModelId, AgentModelInfo, MentionUri, + }; use fs::FakeFs; use gpui::TestAppContext; + use indoc::indoc; + use language_model::fake_provider::FakeLanguageModel; use serde_json::json; use settings::SettingsStore; + use util::path; #[gpui::test] async fn test_maintaining_project_context(cx: &mut TestAppContext) { @@ -648,11 +1052,20 @@ mod tests { ) .await; let project = Project::test(fs.clone(), [], cx).await; - let agent = NativeAgent::new(project.clone(), Templates::new(), None, &mut cx.to_async()) - .await - .unwrap(); - agent.read_with(cx, |agent, _| { - assert_eq!(agent.project_context.borrow().worktrees, vec![]) + let context_store = cx.new(|cx| assistant_context::ContextStore::fake(project.clone(), cx)); + let history_store = cx.new(|cx| HistoryStore::new(context_store, cx)); + let agent = NativeAgent::new( + project.clone(), + history_store, + Templates::new(), + None, + fs.clone(), + &mut cx.to_async(), + ) + .await + .unwrap(); + agent.read_with(cx, |agent, cx| { + assert_eq!(agent.project_context.read(cx).worktrees, vec![]) }); let worktree = project @@ -660,9 +1073,9 @@ mod tests { .await .unwrap(); cx.run_until_parked(); - agent.read_with(cx, |agent, _| { + agent.read_with(cx, |agent, cx| { assert_eq!( - agent.project_context.borrow().worktrees, + agent.project_context.read(cx).worktrees, vec![WorktreeContext { root_name: "a".into(), abs_path: Path::new("/a").into(), @@ -677,7 +1090,7 @@ mod tests { agent.read_with(cx, |agent, cx| { let rules_entry = worktree.read(cx).entry_for_path(".rules").unwrap(); assert_eq!( - agent.project_context.borrow().worktrees, + agent.project_context.read(cx).worktrees, vec![WorktreeContext { root_name: "a".into(), abs_path: Path::new("/a").into(), @@ -691,13 +1104,285 @@ mod tests { }); } + #[gpui::test] + async fn test_listing_models(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/", 
json!({ "a": {} })).await; + let project = Project::test(fs.clone(), [], cx).await; + let context_store = cx.new(|cx| assistant_context::ContextStore::fake(project.clone(), cx)); + let history_store = cx.new(|cx| HistoryStore::new(context_store, cx)); + let connection = NativeAgentConnection( + NativeAgent::new( + project.clone(), + history_store, + Templates::new(), + None, + fs.clone(), + &mut cx.to_async(), + ) + .await + .unwrap(), + ); + + let models = cx.update(|cx| connection.list_models(cx)).await.unwrap(); + + let acp_thread::AgentModelList::Grouped(models) = models else { + panic!("Unexpected model group"); + }; + assert_eq!( + models, + IndexMap::from_iter([( + AgentModelGroupName("Fake".into()), + vec![AgentModelInfo { + id: AgentModelId("fake/fake".into()), + name: "Fake".into(), + icon: Some(ui::IconName::ZedAssistant), + }] + )]) + ); + } + + #[gpui::test] + async fn test_model_selection_persists_to_settings(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.create_dir(paths::settings_file().parent().unwrap()) + .await + .unwrap(); + fs.insert_file( + paths::settings_file(), + json!({ + "agent": { + "default_model": { + "provider": "foo", + "model": "bar" + } + } + }) + .to_string() + .into_bytes(), + ) + .await; + let project = Project::test(fs.clone(), [], cx).await; + + let context_store = cx.new(|cx| assistant_context::ContextStore::fake(project.clone(), cx)); + let history_store = cx.new(|cx| HistoryStore::new(context_store, cx)); + + // Create the agent and connection + let agent = NativeAgent::new( + project.clone(), + history_store, + Templates::new(), + None, + fs.clone(), + &mut cx.to_async(), + ) + .await + .unwrap(); + let connection = NativeAgentConnection(agent.clone()); + + // Create a thread/session + let acp_thread = cx + .update(|cx| { + Rc::new(connection.clone()).new_thread(project.clone(), Path::new("/a"), cx) + }) + .await + .unwrap(); + + let session_id = cx.update(|cx| acp_thread.read(cx).session_id().clone()); + + // Select a model + let model_id = AgentModelId("fake/fake".into()); + cx.update(|cx| connection.select_model(session_id.clone(), model_id.clone(), cx)) + .await + .unwrap(); + + // Verify the thread has the selected model + agent.read_with(cx, |agent, _| { + let session = agent.sessions.get(&session_id).unwrap(); + session.thread.read_with(cx, |thread, _| { + assert_eq!(thread.model().unwrap().id().0, "fake"); + }); + }); + + cx.run_until_parked(); + + // Verify settings file was updated + let settings_content = fs.load(paths::settings_file()).await.unwrap(); + let settings_json: serde_json::Value = serde_json::from_str(&settings_content).unwrap(); + + // Check that the agent settings contain the selected model + assert_eq!( + settings_json["agent"]["default_model"]["model"], + json!("fake") + ); + assert_eq!( + settings_json["agent"]["default_model"]["provider"], + json!("fake") + ); + } + + #[gpui::test] + #[cfg_attr(target_os = "windows", ignore)] // TODO: Fix this test on Windows + async fn test_save_load_thread(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/", + json!({ + "a": { + "b.md": "Lorem" + } + }), + ) + .await; + let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; + let context_store = cx.new(|cx| assistant_context::ContextStore::fake(project.clone(), cx)); + let history_store = cx.new(|cx| HistoryStore::new(context_store, cx)); + let agent = NativeAgent::new( + project.clone(), + history_store.clone(), + 
Templates::new(), + None, + fs.clone(), + &mut cx.to_async(), + ) + .await + .unwrap(); + let connection = Rc::new(NativeAgentConnection(agent.clone())); + + let acp_thread = cx + .update(|cx| { + connection + .clone() + .new_thread(project.clone(), Path::new(""), cx) + }) + .await + .unwrap(); + let session_id = acp_thread.read_with(cx, |thread, _| thread.session_id().clone()); + let thread = agent.read_with(cx, |agent, _| { + agent.sessions.get(&session_id).unwrap().thread.clone() + }); + + // Ensure empty threads are not saved, even if they get mutated. + let model = Arc::new(FakeLanguageModel::default()); + let summary_model = Arc::new(FakeLanguageModel::default()); + thread.update(cx, |thread, cx| { + thread.set_model(model.clone(), cx); + thread.set_summarization_model(Some(summary_model.clone()), cx); + }); + cx.run_until_parked(); + assert_eq!(history_entries(&history_store, cx), vec![]); + + let send = acp_thread.update(cx, |thread, cx| { + thread.send( + vec![ + "What does ".into(), + acp::ContentBlock::ResourceLink(acp::ResourceLink { + name: "b.md".into(), + uri: MentionUri::File { + abs_path: path!("/a/b.md").into(), + } + .to_uri() + .to_string(), + annotations: None, + description: None, + mime_type: None, + size: None, + title: None, + }), + " mean?".into(), + ], + cx, + ) + }); + let send = cx.foreground_executor().spawn(send); + cx.run_until_parked(); + + model.send_last_completion_stream_text_chunk("Lorem."); + model.end_last_completion_stream(); + cx.run_until_parked(); + summary_model.send_last_completion_stream_text_chunk("Explaining /a/b.md"); + summary_model.end_last_completion_stream(); + + send.await.unwrap(); + acp_thread.read_with(cx, |thread, cx| { + assert_eq!( + thread.to_markdown(cx), + indoc! {" + ## User + + What does [@b.md](file:///a/b.md) mean? + + ## Assistant + + Lorem. + + "} + ) + }); + + // Drop the ACP thread, which should cause the session to be dropped as well. + cx.update(|_| { + drop(thread); + drop(acp_thread); + }); + agent.read_with(cx, |agent, _| { + assert_eq!(agent.sessions.keys().cloned().collect::>(), []); + }); + + // Ensure the thread can be reloaded from disk. + assert_eq!( + history_entries(&history_store, cx), + vec![( + HistoryEntryId::AcpThread(session_id.clone()), + "Explaining /a/b.md".into() + )] + ); + let acp_thread = agent + .update(cx, |agent, cx| agent.open_thread(session_id.clone(), cx)) + .await + .unwrap(); + acp_thread.read_with(cx, |thread, cx| { + assert_eq!( + thread.to_markdown(cx), + indoc! {" + ## User + + What does [@b.md](file:///a/b.md) mean? + + ## Assistant + + Lorem. 
+ + "} + ) + }); + } + + fn history_entries( + history: &Entity, + cx: &mut TestAppContext, + ) -> Vec<(HistoryEntryId, String)> { + history.read_with(cx, |history, cx| { + history + .entries(cx) + .iter() + .map(|e| (e.id(), e.title().to_string())) + .collect::>() + }) + } + fn init_test(cx: &mut TestAppContext) { env_logger::try_init().ok(); cx.update(|cx| { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); Project::init_settings(cx); + agent_settings::init(cx); language::init(cx); + LanguageModelRegistry::test(cx); }); } } diff --git a/crates/agent2/src/agent2.rs b/crates/agent2/src/agent2.rs index db743c84296f008d9b47317096525cd816460a87..1fc9c1cb956d1676c42713b5d9bb2a0b51e8ac90 100644 --- a/crates/agent2/src/agent2.rs +++ b/crates/agent2/src/agent2.rs @@ -1,13 +1,19 @@ mod agent; +mod db; +mod history_store; mod native_agent_server; mod templates; mod thread; +mod tool_schema; mod tools; #[cfg(test)] mod tests; pub use agent::*; +pub use db::*; +pub use history_store::*; pub use native_agent_server::NativeAgentServer; +pub use templates::*; pub use thread::*; pub use tools::*; diff --git a/crates/agent2/src/db.rs b/crates/agent2/src/db.rs new file mode 100644 index 0000000000000000000000000000000000000000..1b88955a241945996be911a337035a970c8b4026 --- /dev/null +++ b/crates/agent2/src/db.rs @@ -0,0 +1,488 @@ +use crate::{AgentMessage, AgentMessageContent, UserMessage, UserMessageContent}; +use acp_thread::UserMessageId; +use agent::{thread::DetailedSummaryState, thread_store}; +use agent_client_protocol as acp; +use agent_settings::{AgentProfileId, CompletionMode}; +use anyhow::{Result, anyhow}; +use chrono::{DateTime, Utc}; +use collections::{HashMap, IndexMap}; +use futures::{FutureExt, future::Shared}; +use gpui::{BackgroundExecutor, Global, Task}; +use indoc::indoc; +use parking_lot::Mutex; +use serde::{Deserialize, Serialize}; +use sqlez::{ + bindable::{Bind, Column}, + connection::Connection, + statement::Statement, +}; +use std::sync::Arc; +use ui::{App, SharedString}; + +pub type DbMessage = crate::Message; +pub type DbSummary = DetailedSummaryState; +pub type DbLanguageModel = thread_store::SerializedLanguageModel; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct DbThreadMetadata { + pub id: acp::SessionId, + #[serde(alias = "summary")] + pub title: SharedString, + pub updated_at: DateTime, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct DbThread { + pub title: SharedString, + pub messages: Vec, + pub updated_at: DateTime, + #[serde(default)] + pub detailed_summary: Option, + #[serde(default)] + pub initial_project_snapshot: Option>, + #[serde(default)] + pub cumulative_token_usage: language_model::TokenUsage, + #[serde(default)] + pub request_token_usage: HashMap, + #[serde(default)] + pub model: Option, + #[serde(default)] + pub completion_mode: Option, + #[serde(default)] + pub profile: Option, +} + +impl DbThread { + pub const VERSION: &'static str = "0.3.0"; + + pub fn from_json(json: &[u8]) -> Result { + let saved_thread_json = serde_json::from_slice::(json)?; + match saved_thread_json.get("version") { + Some(serde_json::Value::String(version)) => match version.as_str() { + Self::VERSION => Ok(serde_json::from_value(saved_thread_json)?), + _ => Self::upgrade_from_agent_1(agent::SerializedThread::from_json(json)?), + }, + _ => Self::upgrade_from_agent_1(agent::SerializedThread::from_json(json)?), + } + } + + fn upgrade_from_agent_1(thread: agent::SerializedThread) -> Result { + let mut messages = Vec::new(); + let mut 
request_token_usage = HashMap::default(); + + let mut last_user_message_id = None; + for (ix, msg) in thread.messages.into_iter().enumerate() { + let message = match msg.role { + language_model::Role::User => { + let mut content = Vec::new(); + + // Convert segments to content + for segment in msg.segments { + match segment { + thread_store::SerializedMessageSegment::Text { text } => { + content.push(UserMessageContent::Text(text)); + } + thread_store::SerializedMessageSegment::Thinking { text, .. } => { + // User messages don't have thinking segments, but handle gracefully + content.push(UserMessageContent::Text(text)); + } + thread_store::SerializedMessageSegment::RedactedThinking { .. } => { + // User messages don't have redacted thinking, skip. + } + } + } + + // If no content was added, add context as text if available + if content.is_empty() && !msg.context.is_empty() { + content.push(UserMessageContent::Text(msg.context)); + } + + let id = UserMessageId::new(); + last_user_message_id = Some(id.clone()); + + crate::Message::User(UserMessage { + // MessageId from old format can't be meaningfully converted, so generate a new one + id, + content, + }) + } + language_model::Role::Assistant => { + let mut content = Vec::new(); + + // Convert segments to content + for segment in msg.segments { + match segment { + thread_store::SerializedMessageSegment::Text { text } => { + content.push(AgentMessageContent::Text(text)); + } + thread_store::SerializedMessageSegment::Thinking { + text, + signature, + } => { + content.push(AgentMessageContent::Thinking { text, signature }); + } + thread_store::SerializedMessageSegment::RedactedThinking { data } => { + content.push(AgentMessageContent::RedactedThinking(data)); + } + } + } + + // Convert tool uses + let mut tool_names_by_id = HashMap::default(); + for tool_use in msg.tool_uses { + tool_names_by_id.insert(tool_use.id.clone(), tool_use.name.clone()); + content.push(AgentMessageContent::ToolUse( + language_model::LanguageModelToolUse { + id: tool_use.id, + name: tool_use.name.into(), + raw_input: serde_json::to_string(&tool_use.input) + .unwrap_or_default(), + input: tool_use.input, + is_input_complete: true, + }, + )); + } + + // Convert tool results + let mut tool_results = IndexMap::default(); + for tool_result in msg.tool_results { + let name = tool_names_by_id + .remove(&tool_result.tool_use_id) + .unwrap_or_else(|| SharedString::from("unknown")); + tool_results.insert( + tool_result.tool_use_id.clone(), + language_model::LanguageModelToolResult { + tool_use_id: tool_result.tool_use_id, + tool_name: name.into(), + is_error: tool_result.is_error, + content: tool_result.content, + output: tool_result.output, + }, + ); + } + + if let Some(last_user_message_id) = &last_user_message_id + && let Some(token_usage) = thread.request_token_usage.get(ix).copied() + { + request_token_usage.insert(last_user_message_id.clone(), token_usage); + } + + crate::Message::Agent(AgentMessage { + content, + tool_results, + }) + } + language_model::Role::System => { + // Skip system messages as they're not supported in the new format + continue; + } + }; + + messages.push(message); + } + + Ok(Self { + title: thread.summary, + messages, + updated_at: thread.updated_at, + detailed_summary: match thread.detailed_summary_state { + DetailedSummaryState::NotGenerated | DetailedSummaryState::Generating { .. } => { + None + } + DetailedSummaryState::Generated { text, .. 
} => Some(text), + }, + initial_project_snapshot: thread.initial_project_snapshot, + cumulative_token_usage: thread.cumulative_token_usage, + request_token_usage, + model: thread.model, + completion_mode: thread.completion_mode, + profile: thread.profile, + }) + } +} + +pub static ZED_STATELESS: std::sync::LazyLock = + std::sync::LazyLock::new(|| std::env::var("ZED_STATELESS").is_ok_and(|v| !v.is_empty())); + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub enum DataType { + #[serde(rename = "json")] + Json, + #[serde(rename = "zstd")] + Zstd, +} + +impl Bind for DataType { + fn bind(&self, statement: &Statement, start_index: i32) -> Result { + let value = match self { + DataType::Json => "json", + DataType::Zstd => "zstd", + }; + value.bind(statement, start_index) + } +} + +impl Column for DataType { + fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> { + let (value, next_index) = String::column(statement, start_index)?; + let data_type = match value.as_str() { + "json" => DataType::Json, + "zstd" => DataType::Zstd, + _ => anyhow::bail!("Unknown data type: {}", value), + }; + Ok((data_type, next_index)) + } +} + +pub(crate) struct ThreadsDatabase { + executor: BackgroundExecutor, + connection: Arc>, +} + +struct GlobalThreadsDatabase(Shared, Arc>>>); + +impl Global for GlobalThreadsDatabase {} + +impl ThreadsDatabase { + pub fn connect(cx: &mut App) -> Shared, Arc>>> { + if cx.has_global::() { + return cx.global::().0.clone(); + } + let executor = cx.background_executor().clone(); + let task = executor + .spawn({ + let executor = executor.clone(); + async move { + match ThreadsDatabase::new(executor) { + Ok(db) => Ok(Arc::new(db)), + Err(err) => Err(Arc::new(err)), + } + } + }) + .shared(); + + cx.set_global(GlobalThreadsDatabase(task.clone())); + task + } + + pub fn new(executor: BackgroundExecutor) -> Result { + let connection = if *ZED_STATELESS || cfg!(any(feature = "test-support", test)) { + Connection::open_memory(Some("THREAD_FALLBACK_DB")) + } else { + let threads_dir = paths::data_dir().join("threads"); + std::fs::create_dir_all(&threads_dir)?; + let sqlite_path = threads_dir.join("threads.db"); + Connection::open_file(&sqlite_path.to_string_lossy()) + }; + + connection.exec(indoc! {" + CREATE TABLE IF NOT EXISTS threads ( + id TEXT PRIMARY KEY, + summary TEXT NOT NULL, + updated_at TEXT NOT NULL, + data_type TEXT NOT NULL, + data BLOB NOT NULL + ) + "})?() + .map_err(|e| anyhow!("Failed to create threads table: {}", e))?; + + let db = Self { + executor, + connection: Arc::new(Mutex::new(connection)), + }; + + Ok(db) + } + + fn save_thread_sync( + connection: &Arc>, + id: acp::SessionId, + thread: DbThread, + ) -> Result<()> { + const COMPRESSION_LEVEL: i32 = 3; + + #[derive(Serialize)] + struct SerializedThread { + #[serde(flatten)] + thread: DbThread, + version: &'static str, + } + + let title = thread.title.to_string(); + let updated_at = thread.updated_at.to_rfc3339(); + let json_data = serde_json::to_string(&SerializedThread { + thread, + version: DbThread::VERSION, + })?; + + let connection = connection.lock(); + + let compressed = zstd::encode_all(json_data.as_bytes(), COMPRESSION_LEVEL)?; + let data_type = DataType::Zstd; + let data = compressed; + + let mut insert = connection.exec_bound::<(Arc, String, String, DataType, Vec)>(indoc! {" + INSERT OR REPLACE INTO threads (id, summary, updated_at, data_type, data) VALUES (?, ?, ?, ?, ?) 
+ "})?; + + insert((id.0, title, updated_at, data_type, data))?; + + Ok(()) + } + + pub fn list_threads(&self) -> Task>> { + let connection = self.connection.clone(); + + self.executor.spawn(async move { + let connection = connection.lock(); + + let mut select = + connection.select_bound::<(), (Arc, String, String)>(indoc! {" + SELECT id, summary, updated_at FROM threads ORDER BY updated_at DESC + "})?; + + let rows = select(())?; + let mut threads = Vec::new(); + + for (id, summary, updated_at) in rows { + threads.push(DbThreadMetadata { + id: acp::SessionId(id), + title: summary.into(), + updated_at: DateTime::parse_from_rfc3339(&updated_at)?.with_timezone(&Utc), + }); + } + + Ok(threads) + }) + } + + pub fn load_thread(&self, id: acp::SessionId) -> Task>> { + let connection = self.connection.clone(); + + self.executor.spawn(async move { + let connection = connection.lock(); + let mut select = connection.select_bound::, (DataType, Vec)>(indoc! {" + SELECT data_type, data FROM threads WHERE id = ? LIMIT 1 + "})?; + + let rows = select(id.0)?; + if let Some((data_type, data)) = rows.into_iter().next() { + let json_data = match data_type { + DataType::Zstd => { + let decompressed = zstd::decode_all(&data[..])?; + String::from_utf8(decompressed)? + } + DataType::Json => String::from_utf8(data)?, + }; + let thread = DbThread::from_json(json_data.as_bytes())?; + Ok(Some(thread)) + } else { + Ok(None) + } + }) + } + + pub fn save_thread(&self, id: acp::SessionId, thread: DbThread) -> Task> { + let connection = self.connection.clone(); + + self.executor + .spawn(async move { Self::save_thread_sync(&connection, id, thread) }) + } + + pub fn delete_thread(&self, id: acp::SessionId) -> Task> { + let connection = self.connection.clone(); + + self.executor.spawn(async move { + let connection = connection.lock(); + + let mut delete = connection.exec_bound::>(indoc! {" + DELETE FROM threads WHERE id = ? + "})?; + + delete(id.0)?; + + Ok(()) + }) + } +} + +#[cfg(test)] +mod tests { + + use super::*; + use agent::MessageSegment; + use agent::context::LoadedContext; + use client::Client; + use fs::FakeFs; + use gpui::AppContext; + use gpui::TestAppContext; + use http_client::FakeHttpClient; + use language_model::Role; + use project::Project; + use settings::SettingsStore; + + fn init_test(cx: &mut TestAppContext) { + env_logger::try_init().ok(); + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + Project::init_settings(cx); + language::init(cx); + + let http_client = FakeHttpClient::with_404_response(); + let clock = Arc::new(clock::FakeSystemClock::new()); + let client = Client::new(clock, http_client, cx); + agent::init(cx); + agent_settings::init(cx); + language_model::init(client, cx); + }); + } + + #[gpui::test] + async fn test_retrieving_old_thread(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + + // Save a thread using the old agent. 
+        let thread_store = cx.new(|cx| agent::ThreadStore::fake(project, cx));
+        let thread = thread_store.update(cx, |thread_store, cx| thread_store.create_thread(cx));
+        thread.update(cx, |thread, cx| {
+            thread.insert_message(
+                Role::User,
+                vec![MessageSegment::Text("Hey!".into())],
+                LoadedContext::default(),
+                vec![],
+                false,
+                cx,
+            );
+            thread.insert_message(
+                Role::Assistant,
+                vec![MessageSegment::Text("How're you doing?".into())],
+                LoadedContext::default(),
+                vec![],
+                false,
+                cx,
+            )
+        });
+        thread_store
+            .update(cx, |thread_store, cx| thread_store.save_thread(&thread, cx))
+            .await
+            .unwrap();
+
+        // Open that same thread using the new agent.
+        let db = cx.update(ThreadsDatabase::connect).await.unwrap();
+        let threads = db.list_threads().await.unwrap();
+        assert_eq!(threads.len(), 1);
+        let thread = db
+            .load_thread(threads[0].id.clone())
+            .await
+            .unwrap()
+            .unwrap();
+        assert_eq!(thread.messages[0].to_markdown(), "## User\n\nHey!\n");
+        assert_eq!(
+            thread.messages[1].to_markdown(),
+            "## Assistant\n\nHow're you doing?\n"
+        );
+    }
+}
diff --git a/crates/agent2/src/history_store.rs b/crates/agent2/src/history_store.rs
new file mode 100644
index 0000000000000000000000000000000000000000..78d83cc1d05a72d1499c697751d1dfa792143db3
--- /dev/null
+++ b/crates/agent2/src/history_store.rs
@@ -0,0 +1,362 @@
+use crate::{DbThreadMetadata, ThreadsDatabase};
+use acp_thread::MentionUri;
+use agent_client_protocol as acp;
+use anyhow::{Context as _, Result, anyhow};
+use assistant_context::{AssistantContext, SavedContextMetadata};
+use chrono::{DateTime, Utc};
+use db::kvp::KEY_VALUE_STORE;
+use gpui::{App, AsyncApp, Entity, SharedString, Task, prelude::*};
+use itertools::Itertools;
+use paths::contexts_dir;
+use serde::{Deserialize, Serialize};
+use std::{collections::VecDeque, path::Path, sync::Arc, time::Duration};
+use ui::ElementId;
+use util::ResultExt as _;
+
+const MAX_RECENTLY_OPENED_ENTRIES: usize = 6;
+const RECENTLY_OPENED_THREADS_KEY: &str = "recent-agent-threads";
+const SAVE_RECENTLY_OPENED_ENTRIES_DEBOUNCE: Duration = Duration::from_millis(50);
+
+const DEFAULT_TITLE: &SharedString = &SharedString::new_static("New Thread");
+
+#[derive(Clone, Debug)]
+pub enum HistoryEntry {
+    AcpThread(DbThreadMetadata),
+    TextThread(SavedContextMetadata),
+}
+
+impl HistoryEntry {
+    pub fn updated_at(&self) -> DateTime<Utc> {
+        match self {
+            HistoryEntry::AcpThread(thread) => thread.updated_at,
+            HistoryEntry::TextThread(context) => context.mtime.to_utc(),
+        }
+    }
+
+    pub fn id(&self) -> HistoryEntryId {
+        match self {
+            HistoryEntry::AcpThread(thread) => HistoryEntryId::AcpThread(thread.id.clone()),
+            HistoryEntry::TextThread(context) => HistoryEntryId::TextThread(context.path.clone()),
+        }
+    }
+
+    pub fn mention_uri(&self) -> MentionUri {
+        match self {
+            HistoryEntry::AcpThread(thread) => MentionUri::Thread {
+                id: thread.id.clone(),
+                name: thread.title.to_string(),
+            },
+            HistoryEntry::TextThread(context) => MentionUri::TextThread {
+                path: context.path.as_ref().to_owned(),
+                name: context.title.to_string(),
+            },
+        }
+    }
+
+    pub fn title(&self) -> &SharedString {
+        match self {
+            HistoryEntry::AcpThread(thread) if thread.title.is_empty() => DEFAULT_TITLE,
+            HistoryEntry::AcpThread(thread) => &thread.title,
+            HistoryEntry::TextThread(context) => &context.title,
+        }
+    }
+}
+
+/// Generic identifier for a history entry.
+#[derive(Clone, PartialEq, Eq, Debug, Hash)] +pub enum HistoryEntryId { + AcpThread(acp::SessionId), + TextThread(Arc), +} + +impl Into for HistoryEntryId { + fn into(self) -> ElementId { + match self { + HistoryEntryId::AcpThread(session_id) => ElementId::Name(session_id.0.into()), + HistoryEntryId::TextThread(path) => ElementId::Path(path), + } + } +} + +#[derive(Serialize, Deserialize, Debug)] +enum SerializedRecentOpen { + AcpThread(String), + TextThread(String), +} + +pub struct HistoryStore { + threads: Vec, + context_store: Entity, + recently_opened_entries: VecDeque, + _subscriptions: Vec, + _save_recently_opened_entries_task: Task<()>, +} + +impl HistoryStore { + pub fn new( + context_store: Entity, + cx: &mut Context, + ) -> Self { + let subscriptions = vec![cx.observe(&context_store, |_, _, cx| cx.notify())]; + + cx.spawn(async move |this, cx| { + let entries = Self::load_recently_opened_entries(cx).await; + this.update(cx, |this, cx| { + if let Some(entries) = entries.log_err() { + this.recently_opened_entries = entries; + } + + this.reload(cx); + }) + .ok(); + }) + .detach(); + + Self { + context_store, + recently_opened_entries: VecDeque::default(), + threads: Vec::default(), + _subscriptions: subscriptions, + _save_recently_opened_entries_task: Task::ready(()), + } + } + + pub fn thread_from_session_id(&self, session_id: &acp::SessionId) -> Option<&DbThreadMetadata> { + self.threads.iter().find(|thread| &thread.id == session_id) + } + + pub fn delete_thread( + &mut self, + id: acp::SessionId, + cx: &mut Context, + ) -> Task> { + let database_future = ThreadsDatabase::connect(cx); + cx.spawn(async move |this, cx| { + let database = database_future.await.map_err(|err| anyhow!(err))?; + database.delete_thread(id.clone()).await?; + this.update(cx, |this, cx| this.reload(cx)) + }) + } + + pub fn delete_text_thread( + &mut self, + path: Arc, + cx: &mut Context, + ) -> Task> { + self.context_store.update(cx, |context_store, cx| { + context_store.delete_local_context(path, cx) + }) + } + + pub fn load_text_thread( + &self, + path: Arc, + cx: &mut Context, + ) -> Task>> { + self.context_store.update(cx, |context_store, cx| { + context_store.open_local_context(path, cx) + }) + } + + pub fn reload(&self, cx: &mut Context) { + let database_future = ThreadsDatabase::connect(cx); + cx.spawn(async move |this, cx| { + let threads = database_future + .await + .map_err(|err| anyhow!(err))? 
+ .list_threads() + .await?; + + this.update(cx, |this, cx| { + if this.recently_opened_entries.len() < MAX_RECENTLY_OPENED_ENTRIES { + for thread in threads + .iter() + .take(MAX_RECENTLY_OPENED_ENTRIES - this.recently_opened_entries.len()) + .rev() + { + this.push_recently_opened_entry( + HistoryEntryId::AcpThread(thread.id.clone()), + cx, + ) + } + } + this.threads = threads; + cx.notify(); + }) + }) + .detach_and_log_err(cx); + } + + pub fn entries(&self, cx: &App) -> Vec { + let mut history_entries = Vec::new(); + + #[cfg(debug_assertions)] + if std::env::var("ZED_SIMULATE_NO_THREAD_HISTORY").is_ok() { + return history_entries; + } + + history_entries.extend(self.threads.iter().cloned().map(HistoryEntry::AcpThread)); + history_entries.extend( + self.context_store + .read(cx) + .unordered_contexts() + .cloned() + .map(HistoryEntry::TextThread), + ); + + history_entries.sort_unstable_by_key(|entry| std::cmp::Reverse(entry.updated_at())); + history_entries + } + + pub fn is_empty(&self, cx: &App) -> bool { + self.threads.is_empty() + && self + .context_store + .read(cx) + .unordered_contexts() + .next() + .is_none() + } + + pub fn recently_opened_entries(&self, cx: &App) -> Vec { + #[cfg(debug_assertions)] + if std::env::var("ZED_SIMULATE_NO_THREAD_HISTORY").is_ok() { + return Vec::new(); + } + + let thread_entries = self.threads.iter().flat_map(|thread| { + self.recently_opened_entries + .iter() + .enumerate() + .flat_map(|(index, entry)| match entry { + HistoryEntryId::AcpThread(id) if &thread.id == id => { + Some((index, HistoryEntry::AcpThread(thread.clone()))) + } + _ => None, + }) + }); + + let context_entries = + self.context_store + .read(cx) + .unordered_contexts() + .flat_map(|context| { + self.recently_opened_entries + .iter() + .enumerate() + .flat_map(|(index, entry)| match entry { + HistoryEntryId::TextThread(path) if &context.path == path => { + Some((index, HistoryEntry::TextThread(context.clone()))) + } + _ => None, + }) + }); + + thread_entries + .chain(context_entries) + // optimization to halt iteration early + .take(self.recently_opened_entries.len()) + .sorted_unstable_by_key(|(index, _)| *index) + .map(|(_, entry)| entry) + .collect() + } + + fn save_recently_opened_entries(&mut self, cx: &mut Context) { + let serialized_entries = self + .recently_opened_entries + .iter() + .filter_map(|entry| match entry { + HistoryEntryId::TextThread(path) => path.file_name().map(|file| { + SerializedRecentOpen::TextThread(file.to_string_lossy().to_string()) + }), + HistoryEntryId::AcpThread(id) => { + Some(SerializedRecentOpen::AcpThread(id.to_string())) + } + }) + .collect::>(); + + self._save_recently_opened_entries_task = cx.spawn(async move |_, cx| { + let content = serde_json::to_string(&serialized_entries).unwrap(); + cx.background_executor() + .timer(SAVE_RECENTLY_OPENED_ENTRIES_DEBOUNCE) + .await; + + if cfg!(any(feature = "test-support", test)) { + return; + } + KEY_VALUE_STORE + .write_kvp(RECENTLY_OPENED_THREADS_KEY.to_owned(), content) + .await + .log_err(); + }); + } + + fn load_recently_opened_entries(cx: &AsyncApp) -> Task>> { + cx.background_spawn(async move { + if cfg!(any(feature = "test-support", test)) { + anyhow::bail!("history store does not persist in tests"); + } + let json = KEY_VALUE_STORE + .read_kvp(RECENTLY_OPENED_THREADS_KEY)? + .unwrap_or("[]".to_string()); + let entries = serde_json::from_str::>(&json) + .context("deserializing persisted agent panel navigation history")? 
+ .into_iter() + .take(MAX_RECENTLY_OPENED_ENTRIES) + .flat_map(|entry| match entry { + SerializedRecentOpen::AcpThread(id) => Some(HistoryEntryId::AcpThread( + acp::SessionId(id.as_str().into()), + )), + SerializedRecentOpen::TextThread(file_name) => Some( + HistoryEntryId::TextThread(contexts_dir().join(file_name).into()), + ), + }) + .collect(); + Ok(entries) + }) + } + + pub fn push_recently_opened_entry(&mut self, entry: HistoryEntryId, cx: &mut Context) { + self.recently_opened_entries + .retain(|old_entry| old_entry != &entry); + self.recently_opened_entries.push_front(entry); + self.recently_opened_entries + .truncate(MAX_RECENTLY_OPENED_ENTRIES); + self.save_recently_opened_entries(cx); + } + + pub fn remove_recently_opened_thread(&mut self, id: acp::SessionId, cx: &mut Context) { + self.recently_opened_entries.retain( + |entry| !matches!(entry, HistoryEntryId::AcpThread(thread_id) if thread_id == &id), + ); + self.save_recently_opened_entries(cx); + } + + pub fn replace_recently_opened_text_thread( + &mut self, + old_path: &Path, + new_path: &Arc, + cx: &mut Context, + ) { + for entry in &mut self.recently_opened_entries { + match entry { + HistoryEntryId::TextThread(path) if path.as_ref() == old_path => { + *entry = HistoryEntryId::TextThread(new_path.clone()); + break; + } + _ => {} + } + } + self.save_recently_opened_entries(cx); + } + + pub fn remove_recently_opened_entry(&mut self, entry: &HistoryEntryId, cx: &mut Context) { + self.recently_opened_entries + .retain(|old_entry| old_entry != entry); + self.save_recently_opened_entries(cx); + } + + pub fn recent_entries(&self, limit: usize, cx: &mut Context) -> Vec { + self.entries(cx).into_iter().take(limit).collect() + } +} diff --git a/crates/agent2/src/native_agent_server.rs b/crates/agent2/src/native_agent_server.rs index dd0188b54848903c9fdd5b56db5f44c3d76a84f4..ac5aa95c043e5128ba11f4f1f8930950e836474c 100644 --- a/crates/agent2/src/native_agent_server.rs +++ b/crates/agent2/src/native_agent_server.rs @@ -1,16 +1,25 @@ -use std::path::Path; -use std::rc::Rc; +use std::{any::Any, path::Path, rc::Rc, sync::Arc}; use agent_servers::AgentServer; use anyhow::Result; +use fs::Fs; use gpui::{App, Entity, Task}; use project::Project; use prompt_store::PromptStore; -use crate::{templates::Templates, NativeAgent, NativeAgentConnection}; +use crate::{HistoryStore, NativeAgent, NativeAgentConnection, templates::Templates}; #[derive(Clone)] -pub struct NativeAgentServer; +pub struct NativeAgentServer { + fs: Arc, + history: Entity, +} + +impl NativeAgentServer { + pub fn new(fs: Arc, history: Entity) -> Self { + Self { fs, history } + } +} impl AgentServer for NativeAgentServer { fn name(&self) -> &'static str { @@ -18,16 +27,15 @@ impl AgentServer for NativeAgentServer { } fn empty_state_headline(&self) -> &'static str { - "Native Agent" + "Welcome to the Agent Panel" } fn empty_state_message(&self) -> &'static str { - "How can I help you today?" 
+        ""
     }
 
     fn logo(&self) -> ui::IconName {
-        // Using the ZedAssistant icon as it's the native built-in agent
-        ui::IconName::ZedAssistant
+        ui::IconName::ZedAgent
     }
 
     fn connect(
@@ -41,6 +49,8 @@ impl AgentServer for NativeAgentServer {
             _root_dir
         );
         let project = project.clone();
+        let fs = self.fs.clone();
+        let history = self.history.clone();
         let prompt_store = PromptStore::global(cx);
         cx.spawn(async move |cx| {
             log::debug!("Creating templates for native agent");
@@ -48,7 +58,8 @@
 
             let prompt_store = prompt_store.await?;
             log::debug!("Creating native agent entity");
-            let agent = NativeAgent::new(project, templates, Some(prompt_store), cx).await?;
+            let agent =
+                NativeAgent::new(project, history, templates, Some(prompt_store), fs, cx).await?;
 
             // Create the connection wrapper
             let connection = NativeAgentConnection(agent);
@@ -57,4 +68,57 @@
             Ok(Rc::new(connection) as Rc<dyn AgentConnection>)
         })
     }
+
+    fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
+        self
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    use assistant_context::ContextStore;
+    use gpui::AppContext;
+
+    agent_servers::e2e_tests::common_e2e_tests!(
+        async |fs, project, cx| {
+            let auth = cx.update(|cx| {
+                prompt_store::init(cx);
+                terminal::init(cx);
+
+                let registry = language_model::LanguageModelRegistry::read_global(cx);
+                let auth = registry
+                    .provider(&language_model::ANTHROPIC_PROVIDER_ID)
+                    .unwrap()
+                    .authenticate(cx);
+
+                cx.spawn(async move |_| auth.await)
+            });
+
+            auth.await.unwrap();
+
+            cx.update(|cx| {
+                let registry = language_model::LanguageModelRegistry::global(cx);
+
+                registry.update(cx, |registry, cx| {
+                    registry.select_default_model(
+                        Some(&language_model::SelectedModel {
+                            provider: language_model::ANTHROPIC_PROVIDER_ID,
+                            model: language_model::LanguageModelId("claude-sonnet-4-latest".into()),
+                        }),
+                        cx,
+                    );
+                });
+            });
+
+            let history = cx.update(|cx| {
+                let context_store = cx.new(move |cx| ContextStore::fake(project.clone(), cx));
+                cx.new(move |cx| HistoryStore::new(context_store, cx))
+            });
+
+            NativeAgentServer::new(fs.clone(), history)
+        },
+        allow_option_id = "allow"
+    );
 }
diff --git a/crates/agent2/src/templates.rs b/crates/agent2/src/templates.rs
index a63f0ad206308130712b9481cfd7231eb0fd2696..72a8f6633cb7bb926580dbb4f9e65ec032162d93 100644
--- a/crates/agent2/src/templates.rs
+++ b/crates/agent2/src/templates.rs
@@ -62,7 +62,7 @@ fn contains(
             handlebars::RenderError::new("contains: missing or invalid query parameter")
         })?;
 
-    if list.contains(&query) {
+    if list.contains(query) {
         out.write("true")?;
     }
 
diff --git a/crates/agent2/src/tests/mod.rs b/crates/agent2/src/tests/mod.rs
index 7913f9a24cb988ffff507efbd6d6bdf38a008b66..3bd1be497ef3edcbbcf0429e75c956e70cba0770 100644
--- a/crates/agent2/src/tests/mod.rs
+++ b/crates/agent2/src/tests/mod.rs
@@ -1,27 +1,31 @@
 use super::*;
-use crate::templates::Templates;
-use acp_thread::AgentConnection;
+use acp_thread::{AgentConnection, AgentModelGroupName, AgentModelList, UserMessageId};
+use action_log::ActionLog;
 use agent_client_protocol::{self as acp};
+use agent_settings::AgentProfileId;
 use anyhow::Result;
-use assistant_tool::ActionLog;
 use client::{Client, UserStore};
-use fs::FakeFs;
-use futures::channel::mpsc::UnboundedReceiver;
-use gpui::{http_client::FakeHttpClient, AppContext, Entity, Task, TestAppContext};
+use fs::{FakeFs, Fs};
+use futures::{StreamExt, channel::mpsc::UnboundedReceiver};
+use gpui::{
+    App, AppContext, Entity, Task, TestAppContext, UpdateGlobal,
http_client::FakeHttpClient, +}; use indoc::indoc; use language_model::{ - fake_provider::FakeLanguageModel, LanguageModel, LanguageModelCompletionError, - LanguageModelCompletionEvent, LanguageModelId, LanguageModelRegistry, LanguageModelToolResult, - LanguageModelToolUse, MessageContent, Role, StopReason, + LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelId, + LanguageModelProviderName, LanguageModelRegistry, LanguageModelRequestMessage, + LanguageModelToolResult, LanguageModelToolUse, MessageContent, Role, StopReason, + fake_provider::FakeLanguageModel, }; +use pretty_assertions::assert_eq; use project::Project; use prompt_store::ProjectContext; use reqwest_client::ReqwestClient; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use serde_json::json; -use smol::stream::StreamExt; -use std::{cell::RefCell, path::Path, rc::Rc, sync::Arc, time::Duration}; +use settings::SettingsStore; +use std::{path::Path, rc::Rc, sync::Arc, time::Duration}; use util::path; mod test_tools; @@ -30,19 +34,24 @@ use test_tools::*; #[gpui::test] #[ignore = "can't run on CI yet"] async fn test_echo(cx: &mut TestAppContext) { - let ThreadTest { model, thread, .. } = setup(cx, TestModel::Sonnet4).await; + let ThreadTest { thread, .. } = setup(cx, TestModel::Sonnet4).await; let events = thread .update(cx, |thread, cx| { - thread.send(model.clone(), "Testing: Reply with 'Hello'", cx) + thread.send(UserMessageId::new(), ["Testing: Reply with 'Hello'"], cx) }) + .unwrap() .collect() .await; thread.update(cx, |thread, _cx| { assert_eq!( - thread.messages().last().unwrap().content, - vec![MessageContent::Text("Hello".to_string())] - ); + thread.last_message().unwrap().to_markdown(), + indoc! {" + ## Assistant + + Hello + "} + ) }); assert_eq!(stop_events(events), vec![acp::StopReason::EndTurn]); } @@ -50,28 +59,30 @@ async fn test_echo(cx: &mut TestAppContext) { #[gpui::test] #[ignore = "can't run on CI yet"] async fn test_thinking(cx: &mut TestAppContext) { - let ThreadTest { model, thread, .. } = setup(cx, TestModel::Sonnet4Thinking).await; + let ThreadTest { thread, .. } = setup(cx, TestModel::Sonnet4Thinking).await; let events = thread .update(cx, |thread, cx| { thread.send( - model.clone(), - indoc! {" + UserMessageId::new(), + [indoc! {" Testing: Generate a thinking step where you just think the word 'Think', and have your final answer be 'Hello' - "}, + "}], cx, ) }) + .unwrap() .collect() .await; thread.update(cx, |thread, _cx| { assert_eq!( - thread.messages().last().unwrap().to_markdown(), + thread.last_message().unwrap().to_markdown(), indoc! {" - ## assistant + ## Assistant + Think Hello "} @@ -90,9 +101,15 @@ async fn test_system_prompt(cx: &mut TestAppContext) { } = setup(cx, TestModel::Fake).await; let fake_model = model.as_fake(); - project_context.borrow_mut().shell = "test-shell".into(); + project_context.update(cx, |project_context, _cx| { + project_context.shell = "test-shell".into() + }); thread.update(cx, |thread, _| thread.add_tool(EchoTool)); - thread.update(cx, |thread, cx| thread.send(model.clone(), "abc", cx)); + thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["abc"], cx) + }) + .unwrap(); cx.run_until_parked(); let mut pending_completions = fake_model.pending_completions(); assert_eq!( @@ -119,21 +136,156 @@ async fn test_system_prompt(cx: &mut TestAppContext) { ); } +#[gpui::test] +async fn test_prompt_caching(cx: &mut TestAppContext) { + let ThreadTest { model, thread, .. 
} = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + // Send initial user message and verify it's cached + thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Message 1"], cx) + }) + .unwrap(); + cx.run_until_parked(); + + let completion = fake_model.pending_completions().pop().unwrap(); + assert_eq!( + completion.messages[1..], + vec![LanguageModelRequestMessage { + role: Role::User, + content: vec!["Message 1".into()], + cache: true + }] + ); + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::Text( + "Response to Message 1".into(), + )); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + // Send another user message and verify only the latest is cached + thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Message 2"], cx) + }) + .unwrap(); + cx.run_until_parked(); + + let completion = fake_model.pending_completions().pop().unwrap(); + assert_eq!( + completion.messages[1..], + vec![ + LanguageModelRequestMessage { + role: Role::User, + content: vec!["Message 1".into()], + cache: false + }, + LanguageModelRequestMessage { + role: Role::Assistant, + content: vec!["Response to Message 1".into()], + cache: false + }, + LanguageModelRequestMessage { + role: Role::User, + content: vec!["Message 2".into()], + cache: true + } + ] + ); + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::Text( + "Response to Message 2".into(), + )); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + // Simulate a tool call and verify that the latest tool result is cached + thread.update(cx, |thread, _| thread.add_tool(EchoTool)); + thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Use the echo tool"], cx) + }) + .unwrap(); + cx.run_until_parked(); + + let tool_use = LanguageModelToolUse { + id: "tool_1".into(), + name: EchoTool.name().into(), + raw_input: json!({"text": "test"}).to_string(), + input: json!({"text": "test"}), + is_input_complete: true, + }; + fake_model + .send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use.clone())); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + let completion = fake_model.pending_completions().pop().unwrap(); + let tool_result = LanguageModelToolResult { + tool_use_id: "tool_1".into(), + tool_name: EchoTool.name().into(), + is_error: false, + content: "test".into(), + output: Some("test".into()), + }; + assert_eq!( + completion.messages[1..], + vec![ + LanguageModelRequestMessage { + role: Role::User, + content: vec!["Message 1".into()], + cache: false + }, + LanguageModelRequestMessage { + role: Role::Assistant, + content: vec!["Response to Message 1".into()], + cache: false + }, + LanguageModelRequestMessage { + role: Role::User, + content: vec!["Message 2".into()], + cache: false + }, + LanguageModelRequestMessage { + role: Role::Assistant, + content: vec!["Response to Message 2".into()], + cache: false + }, + LanguageModelRequestMessage { + role: Role::User, + content: vec!["Use the echo tool".into()], + cache: false + }, + LanguageModelRequestMessage { + role: Role::Assistant, + content: vec![MessageContent::ToolUse(tool_use)], + cache: false + }, + LanguageModelRequestMessage { + role: Role::User, + content: vec![MessageContent::ToolResult(tool_result)], + cache: true + } + ] + ); +} + #[gpui::test] #[ignore = "can't run on CI yet"] async fn test_basic_tool_calls(cx: &mut TestAppContext) { - let ThreadTest { model, thread, .. 
} = setup(cx, TestModel::Sonnet4).await; + let ThreadTest { thread, .. } = setup(cx, TestModel::Sonnet4).await; // Test a tool call that's likely to complete *before* streaming stops. let events = thread .update(cx, |thread, cx| { thread.add_tool(EchoTool); thread.send( - model.clone(), - "Now test the echo tool with 'Hello'. Does it work? Say 'Yes' or 'No'.", + UserMessageId::new(), + ["Now test the echo tool with 'Hello'. Does it work? Say 'Yes' or 'No'."], cx, ) }) + .unwrap() .collect() .await; assert_eq!(stop_events(events), vec![acp::StopReason::EndTurn]); @@ -144,49 +296,62 @@ async fn test_basic_tool_calls(cx: &mut TestAppContext) { thread.remove_tool(&AgentTool::name(&EchoTool)); thread.add_tool(DelayTool); thread.send( - model.clone(), - "Now call the delay tool with 200ms. When the timer goes off, then you echo the output of the tool.", + UserMessageId::new(), + [ + "Now call the delay tool with 200ms.", + "When the timer goes off, then you echo the output of the tool.", + ], cx, ) }) + .unwrap() .collect() .await; assert_eq!(stop_events(events), vec![acp::StopReason::EndTurn]); thread.update(cx, |thread, _cx| { - assert!(thread - .messages() - .last() - .unwrap() - .content - .iter() - .any(|content| { - if let MessageContent::Text(text) = content { - text.contains("Ding") - } else { - false - } - })); + assert!( + thread + .last_message() + .unwrap() + .as_agent_message() + .unwrap() + .content + .iter() + .any(|content| { + if let AgentMessageContent::Text(text) = content { + text.contains("Ding") + } else { + false + } + }), + "{}", + thread.to_markdown() + ); }); } #[gpui::test] #[ignore = "can't run on CI yet"] async fn test_streaming_tool_calls(cx: &mut TestAppContext) { - let ThreadTest { model, thread, .. } = setup(cx, TestModel::Sonnet4).await; + let ThreadTest { thread, .. } = setup(cx, TestModel::Sonnet4).await; // Test a tool call that's likely to complete *before* streaming stops. - let mut events = thread.update(cx, |thread, cx| { - thread.add_tool(WordListTool); - thread.send(model.clone(), "Test the word_list tool.", cx) - }); + let mut events = thread + .update(cx, |thread, cx| { + thread.add_tool(WordListTool); + thread.send(UserMessageId::new(), ["Test the word_list tool."], cx) + }) + .unwrap(); let mut saw_partial_tool_use = false; while let Some(event) = events.next().await { - if let Ok(AgentResponseEvent::ToolCall(tool_call)) = event { + if let Ok(ThreadEvent::ToolCall(tool_call)) = event { thread.update(cx, |thread, _cx| { // Look for a tool use in the thread's last message - let last_content = thread.messages().last().unwrap().content.last().unwrap(); - if let MessageContent::ToolUse(last_tool_use) = last_content { + let message = thread.last_message().unwrap(); + let agent_message = message.as_agent_message().unwrap(); + let last_content = agent_message.content.last().unwrap(); + if let AgentMessageContent::ToolUse(last_tool_use) = last_content { assert_eq!(last_tool_use.name.as_ref(), "word_list"); if tool_call.status == acp::ToolCallStatus::Pending { if !last_tool_use.is_input_complete @@ -222,10 +387,12 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { let ThreadTest { model, thread, .. 
} = setup(cx, TestModel::Fake).await; let fake_model = model.as_fake(); - let mut events = thread.update(cx, |thread, cx| { - thread.add_tool(ToolRequiringPermission); - thread.send(model.clone(), "abc", cx) - }); + let mut events = thread + .update(cx, |thread, cx| { + thread.add_tool(ToolRequiringPermission); + thread.send(UserMessageId::new(), ["abc"], cx) + }) + .unwrap(); cx.run_until_parked(); fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( LanguageModelToolUse { @@ -268,14 +435,14 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { assert_eq!( message.content, vec![ - MessageContent::ToolResult(LanguageModelToolResult { + language_model::MessageContent::ToolResult(LanguageModelToolResult { tool_use_id: tool_call_auth_1.tool_call.id.0.to_string().into(), tool_name: ToolRequiringPermission.name().into(), is_error: false, content: "Allowed".into(), - output: None + output: Some("Allowed".into()) }), - MessageContent::ToolResult(LanguageModelToolResult { + language_model::MessageContent::ToolResult(LanguageModelToolResult { tool_use_id: tool_call_auth_2.tool_call.id.0.to_string().into(), tool_name: ToolRequiringPermission.name().into(), is_error: true, @@ -284,6 +451,67 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { }) ] ); + + // Simulate yet another tool call. + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: "tool_id_3".into(), + name: ToolRequiringPermission.name().into(), + raw_input: "{}".into(), + input: json!({}), + is_input_complete: true, + }, + )); + fake_model.end_last_completion_stream(); + + // Respond by always allowing tools. + let tool_call_auth_3 = next_tool_call_authorization(&mut events).await; + tool_call_auth_3 + .response + .send(tool_call_auth_3.options[0].id.clone()) + .unwrap(); + cx.run_until_parked(); + let completion = fake_model.pending_completions().pop().unwrap(); + let message = completion.messages.last().unwrap(); + assert_eq!( + message.content, + vec![language_model::MessageContent::ToolResult( + LanguageModelToolResult { + tool_use_id: tool_call_auth_3.tool_call.id.0.to_string().into(), + tool_name: ToolRequiringPermission.name().into(), + is_error: false, + content: "Allowed".into(), + output: Some("Allowed".into()) + } + )] + ); + + // Simulate a final tool call, ensuring we don't trigger authorization. + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: "tool_id_4".into(), + name: ToolRequiringPermission.name().into(), + raw_input: "{}".into(), + input: json!({}), + is_input_complete: true, + }, + )); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + let completion = fake_model.pending_completions().pop().unwrap(); + let message = completion.messages.last().unwrap(); + assert_eq!( + message.content, + vec![language_model::MessageContent::ToolResult( + LanguageModelToolResult { + tool_use_id: "tool_id_4".into(), + tool_name: ToolRequiringPermission.name().into(), + is_error: false, + content: "Allowed".into(), + output: Some("Allowed".into()) + } + )] + ); } #[gpui::test] @@ -291,7 +519,11 @@ async fn test_tool_hallucination(cx: &mut TestAppContext) { let ThreadTest { model, thread, .. 
} = setup(cx, TestModel::Fake).await; let fake_model = model.as_fake(); - let mut events = thread.update(cx, |thread, cx| thread.send(model.clone(), "abc", cx)); + let mut events = thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["abc"], cx) + }) + .unwrap(); cx.run_until_parked(); fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( LanguageModelToolUse { @@ -307,28 +539,218 @@ async fn test_tool_hallucination(cx: &mut TestAppContext) { let tool_call = expect_tool_call(&mut events).await; assert_eq!(tool_call.title, "nonexistent_tool"); assert_eq!(tool_call.status, acp::ToolCallStatus::Pending); - let update = expect_tool_call_update(&mut events).await; + let update = expect_tool_call_update_fields(&mut events).await; assert_eq!(update.fields.status, Some(acp::ToolCallStatus::Failed)); } -async fn expect_tool_call( - events: &mut UnboundedReceiver>, -) -> acp::ToolCall { +#[gpui::test] +async fn test_resume_after_tool_use_limit(cx: &mut TestAppContext) { + let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + let events = thread + .update(cx, |thread, cx| { + thread.add_tool(EchoTool); + thread.send(UserMessageId::new(), ["abc"], cx) + }) + .unwrap(); + cx.run_until_parked(); + let tool_use = LanguageModelToolUse { + id: "tool_id_1".into(), + name: EchoTool.name().into(), + raw_input: "{}".into(), + input: serde_json::to_value(&EchoToolInput { text: "def".into() }).unwrap(), + is_input_complete: true, + }; + fake_model + .send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use.clone())); + fake_model.end_last_completion_stream(); + + cx.run_until_parked(); + let completion = fake_model.pending_completions().pop().unwrap(); + let tool_result = LanguageModelToolResult { + tool_use_id: "tool_id_1".into(), + tool_name: EchoTool.name().into(), + is_error: false, + content: "def".into(), + output: Some("def".into()), + }; + assert_eq!( + completion.messages[1..], + vec![ + LanguageModelRequestMessage { + role: Role::User, + content: vec!["abc".into()], + cache: false + }, + LanguageModelRequestMessage { + role: Role::Assistant, + content: vec![MessageContent::ToolUse(tool_use.clone())], + cache: false + }, + LanguageModelRequestMessage { + role: Role::User, + content: vec![MessageContent::ToolResult(tool_result.clone())], + cache: true + }, + ] + ); + + // Simulate reaching tool use limit. 
+ fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::StatusUpdate( + cloud_llm_client::CompletionRequestStatus::ToolUseLimitReached, + )); + fake_model.end_last_completion_stream(); + let last_event = events.collect::>().await.pop().unwrap(); + assert!( + last_event + .unwrap_err() + .is::() + ); + + let events = thread.update(cx, |thread, cx| thread.resume(cx)).unwrap(); + cx.run_until_parked(); + let completion = fake_model.pending_completions().pop().unwrap(); + assert_eq!( + completion.messages[1..], + vec![ + LanguageModelRequestMessage { + role: Role::User, + content: vec!["abc".into()], + cache: false + }, + LanguageModelRequestMessage { + role: Role::Assistant, + content: vec![MessageContent::ToolUse(tool_use)], + cache: false + }, + LanguageModelRequestMessage { + role: Role::User, + content: vec![MessageContent::ToolResult(tool_result)], + cache: false + }, + LanguageModelRequestMessage { + role: Role::User, + content: vec!["Continue where you left off".into()], + cache: true + } + ] + ); + + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::Text("Done".into())); + fake_model.end_last_completion_stream(); + events.collect::>().await; + thread.read_with(cx, |thread, _cx| { + assert_eq!( + thread.last_message().unwrap().to_markdown(), + indoc! {" + ## Assistant + + Done + "} + ) + }); + + // Ensure we error if calling resume when tool use limit was *not* reached. + let error = thread + .update(cx, |thread, cx| thread.resume(cx)) + .unwrap_err(); + assert_eq!( + error.to_string(), + "can only resume after tool use limit is reached" + ) +} + +#[gpui::test] +async fn test_send_after_tool_use_limit(cx: &mut TestAppContext) { + let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + let events = thread + .update(cx, |thread, cx| { + thread.add_tool(EchoTool); + thread.send(UserMessageId::new(), ["abc"], cx) + }) + .unwrap(); + cx.run_until_parked(); + + let tool_use = LanguageModelToolUse { + id: "tool_id_1".into(), + name: EchoTool.name().into(), + raw_input: "{}".into(), + input: serde_json::to_value(&EchoToolInput { text: "def".into() }).unwrap(), + is_input_complete: true, + }; + let tool_result = LanguageModelToolResult { + tool_use_id: "tool_id_1".into(), + tool_name: EchoTool.name().into(), + is_error: false, + content: "def".into(), + output: Some("def".into()), + }; + fake_model + .send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use.clone())); + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::StatusUpdate( + cloud_llm_client::CompletionRequestStatus::ToolUseLimitReached, + )); + fake_model.end_last_completion_stream(); + let last_event = events.collect::>().await.pop().unwrap(); + assert!( + last_event + .unwrap_err() + .is::() + ); + + thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), vec!["ghi"], cx) + }) + .unwrap(); + cx.run_until_parked(); + let completion = fake_model.pending_completions().pop().unwrap(); + assert_eq!( + completion.messages[1..], + vec![ + LanguageModelRequestMessage { + role: Role::User, + content: vec!["abc".into()], + cache: false + }, + LanguageModelRequestMessage { + role: Role::Assistant, + content: vec![MessageContent::ToolUse(tool_use)], + cache: false + }, + LanguageModelRequestMessage { + role: Role::User, + content: vec![MessageContent::ToolResult(tool_result)], + cache: false + }, + LanguageModelRequestMessage { + role: Role::User, + content: vec!["ghi".into()], 
+ cache: true + } + ] + ); +} + +async fn expect_tool_call(events: &mut UnboundedReceiver>) -> acp::ToolCall { let event = events .next() .await .expect("no tool call authorization event received") .unwrap(); match event { - AgentResponseEvent::ToolCall(tool_call) => return tool_call, + ThreadEvent::ToolCall(tool_call) => tool_call, event => { panic!("Unexpected event {event:?}"); } } } -async fn expect_tool_call_update( - events: &mut UnboundedReceiver>, +async fn expect_tool_call_update_fields( + events: &mut UnboundedReceiver>, ) -> acp::ToolCallUpdate { let event = events .next() @@ -336,7 +758,7 @@ async fn expect_tool_call_update( .expect("no tool call authorization event received") .unwrap(); match event { - AgentResponseEvent::ToolCallUpdate(tool_call_update) => return tool_call_update, + ThreadEvent::ToolCallUpdate(acp_thread::ToolCallUpdate::UpdateFields(update)) => update, event => { panic!("Unexpected event {event:?}"); } @@ -344,7 +766,7 @@ async fn expect_tool_call_update( } async fn next_tool_call_authorization( - events: &mut UnboundedReceiver>, + events: &mut UnboundedReceiver>, ) -> ToolCallAuthorization { loop { let event = events @@ -352,7 +774,7 @@ async fn next_tool_call_authorization( .await .expect("no tool call authorization event received") .unwrap(); - if let AgentResponseEvent::ToolCallAuthorization(tool_call_authorization) = event { + if let ThreadEvent::ToolCallAuthorization(tool_call_authorization) = event { let permission_kinds = tool_call_authorization .options .iter() @@ -374,18 +796,23 @@ async fn next_tool_call_authorization( #[gpui::test] #[ignore = "can't run on CI yet"] async fn test_concurrent_tool_calls(cx: &mut TestAppContext) { - let ThreadTest { model, thread, .. } = setup(cx, TestModel::Sonnet4).await; + let ThreadTest { thread, .. } = setup(cx, TestModel::Sonnet4).await; // Test concurrent tool calls with different delay times let events = thread .update(cx, |thread, cx| { thread.add_tool(DelayTool); thread.send( - model.clone(), - "Call the delay tool twice in the same message. Once with 100ms. Once with 300ms. When both timers are complete, describe the outputs.", + UserMessageId::new(), + [ + "Call the delay tool twice in the same message.", + "Once with 100ms. Once with 300ms.", + "When both timers are complete, describe the outputs.", + ], cx, ) }) + .unwrap() .collect() .await; @@ -393,12 +820,13 @@ async fn test_concurrent_tool_calls(cx: &mut TestAppContext) { assert_eq!(stop_reasons, vec![acp::StopReason::EndTurn]); thread.update(cx, |thread, _cx| { - let last_message = thread.messages().last().unwrap(); - let text = last_message + let last_message = thread.last_message().unwrap(); + let agent_message = last_message.as_agent_message().unwrap(); + let text = agent_message .content .iter() .filter_map(|content| { - if let MessageContent::Text(text) = content { + if let AgentMessageContent::Text(text) = content { Some(text.as_str()) } else { None @@ -411,83 +839,251 @@ async fn test_concurrent_tool_calls(cx: &mut TestAppContext) { } #[gpui::test] -#[ignore = "can't run on CI yet"] -async fn test_cancellation(cx: &mut TestAppContext) { - let ThreadTest { model, thread, .. } = setup(cx, TestModel::Sonnet4).await; +async fn test_profiles(cx: &mut TestAppContext) { + let ThreadTest { + model, thread, fs, .. 
+ } = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); - let mut events = thread.update(cx, |thread, cx| { - thread.add_tool(InfiniteTool); + thread.update(cx, |thread, _cx| { + thread.add_tool(DelayTool); thread.add_tool(EchoTool); - thread.send( - model.clone(), - "Call the echo tool and then call the infinite tool, then explain their output", - cx, - ) + thread.add_tool(InfiniteTool); }); - // Wait until both tools are called. - let mut expected_tool_calls = vec!["echo", "infinite"]; - let mut echo_id = None; - let mut echo_completed = false; - while let Some(event) = events.next().await { - match event.unwrap() { - AgentResponseEvent::ToolCall(tool_call) => { - assert_eq!(tool_call.title, expected_tool_calls.remove(0)); - if tool_call.title == "echo" { - echo_id = Some(tool_call.id); - } - } - AgentResponseEvent::ToolCallUpdate(acp::ToolCallUpdate { - id, - fields: - acp::ToolCallUpdateFields { - status: Some(acp::ToolCallStatus::Completed), - .. + // Override profiles and wait for settings to be loaded. + fs.insert_file( + paths::settings_file(), + json!({ + "agent": { + "profiles": { + "test-1": { + "name": "Test Profile 1", + "tools": { + EchoTool.name(): true, + DelayTool.name(): true, + } }, - }) if Some(&id) == echo_id.as_ref() => { - echo_completed = true; + "test-2": { + "name": "Test Profile 2", + "tools": { + InfiniteTool.name(): true, + } + } + } } - _ => {} - } - - if expected_tool_calls.is_empty() && echo_completed { - break; - } - } - - // Cancel the current send and ensure that the event stream is closed, even - // if one of the tools is still running. - thread.update(cx, |thread, _cx| thread.cancel()); - events.collect::>().await; + }) + .to_string() + .into_bytes(), + ) + .await; + cx.run_until_parked(); - // Ensure we can still send a new message after cancellation. - let events = thread + // Test that test-1 profile (default) has echo and delay tools + thread .update(cx, |thread, cx| { - thread.send(model.clone(), "Testing: reply with 'Hello' then stop.", cx) + thread.set_profile(AgentProfileId("test-1".into())); + thread.send(UserMessageId::new(), ["test"], cx) }) + .unwrap(); + cx.run_until_parked(); + + let mut pending_completions = fake_model.pending_completions(); + assert_eq!(pending_completions.len(), 1); + let completion = pending_completions.pop().unwrap(); + let tool_names: Vec = completion + .tools + .iter() + .map(|tool| tool.name.clone()) + .collect(); + assert_eq!(tool_names, vec![DelayTool.name(), EchoTool.name()]); + fake_model.end_last_completion_stream(); + + // Switch to test-2 profile, and verify that it has only the infinite tool. + thread + .update(cx, |thread, cx| { + thread.set_profile(AgentProfileId("test-2".into())); + thread.send(UserMessageId::new(), ["test2"], cx) + }) + .unwrap(); + cx.run_until_parked(); + let mut pending_completions = fake_model.pending_completions(); + assert_eq!(pending_completions.len(), 1); + let completion = pending_completions.pop().unwrap(); + let tool_names: Vec = completion + .tools + .iter() + .map(|tool| tool.name.clone()) + .collect(); + assert_eq!(tool_names, vec![InfiniteTool.name()]); +} + +#[gpui::test] +#[ignore = "can't run on CI yet"] +async fn test_cancellation(cx: &mut TestAppContext) { + let ThreadTest { thread, .. 
} = setup(cx, TestModel::Sonnet4).await; + + let mut events = thread + .update(cx, |thread, cx| { + thread.add_tool(InfiniteTool); + thread.add_tool(EchoTool); + thread.send( + UserMessageId::new(), + ["Call the echo tool, then call the infinite tool, then explain their output"], + cx, + ) + }) + .unwrap(); + + // Wait until both tools are called. + let mut expected_tools = vec!["Echo", "Infinite Tool"]; + let mut echo_id = None; + let mut echo_completed = false; + while let Some(event) = events.next().await { + match event.unwrap() { + ThreadEvent::ToolCall(tool_call) => { + assert_eq!(tool_call.title, expected_tools.remove(0)); + if tool_call.title == "Echo" { + echo_id = Some(tool_call.id); + } + } + ThreadEvent::ToolCallUpdate(acp_thread::ToolCallUpdate::UpdateFields( + acp::ToolCallUpdate { + id, + fields: + acp::ToolCallUpdateFields { + status: Some(acp::ToolCallStatus::Completed), + .. + }, + }, + )) if Some(&id) == echo_id.as_ref() => { + echo_completed = true; + } + _ => {} + } + + if expected_tools.is_empty() && echo_completed { + break; + } + } + + // Cancel the current send and ensure that the event stream is closed, even + // if one of the tools is still running. + thread.update(cx, |thread, cx| thread.cancel(cx)); + let events = events.collect::>().await; + let last_event = events.last(); + assert!( + matches!( + last_event, + Some(Ok(ThreadEvent::Stop(acp::StopReason::Cancelled))) + ), + "unexpected event {last_event:?}" + ); + + // Ensure we can still send a new message after cancellation. + let events = thread + .update(cx, |thread, cx| { + thread.send( + UserMessageId::new(), + ["Testing: reply with 'Hello' then stop."], + cx, + ) + }) + .unwrap() .collect::>() .await; thread.update(cx, |thread, _cx| { + let message = thread.last_message().unwrap(); + let agent_message = message.as_agent_message().unwrap(); assert_eq!( - thread.messages().last().unwrap().content, - vec![MessageContent::Text("Hello".to_string())] + agent_message.content, + vec![AgentMessageContent::Text("Hello".to_string())] ); }); assert_eq!(stop_events(events), vec![acp::StopReason::EndTurn]); } +#[gpui::test] +async fn test_in_progress_send_canceled_by_next_send(cx: &mut TestAppContext) { + let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + let events_1 = thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Hello 1"], cx) + }) + .unwrap(); + cx.run_until_parked(); + fake_model.send_last_completion_stream_text_chunk("Hey 1!"); + cx.run_until_parked(); + + let events_2 = thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Hello 2"], cx) + }) + .unwrap(); + cx.run_until_parked(); + fake_model.send_last_completion_stream_text_chunk("Hey 2!"); + fake_model + .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)); + fake_model.end_last_completion_stream(); + + let events_1 = events_1.collect::>().await; + assert_eq!(stop_events(events_1), vec![acp::StopReason::Cancelled]); + let events_2 = events_2.collect::>().await; + assert_eq!(stop_events(events_2), vec![acp::StopReason::EndTurn]); +} + +#[gpui::test] +async fn test_subsequent_successful_sends_dont_cancel(cx: &mut TestAppContext) { + let ThreadTest { model, thread, .. 
} = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + let events_1 = thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Hello 1"], cx) + }) + .unwrap(); + cx.run_until_parked(); + fake_model.send_last_completion_stream_text_chunk("Hey 1!"); + fake_model + .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)); + fake_model.end_last_completion_stream(); + let events_1 = events_1.collect::>().await; + + let events_2 = thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Hello 2"], cx) + }) + .unwrap(); + cx.run_until_parked(); + fake_model.send_last_completion_stream_text_chunk("Hey 2!"); + fake_model + .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)); + fake_model.end_last_completion_stream(); + let events_2 = events_2.collect::>().await; + + assert_eq!(stop_events(events_1), vec![acp::StopReason::EndTurn]); + assert_eq!(stop_events(events_2), vec![acp::StopReason::EndTurn]); +} + #[gpui::test] async fn test_refusal(cx: &mut TestAppContext) { let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; let fake_model = model.as_fake(); - let events = thread.update(cx, |thread, cx| thread.send(model.clone(), "Hello", cx)); + let events = thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Hello"], cx) + }) + .unwrap(); cx.run_until_parked(); thread.read_with(cx, |thread, _| { assert_eq!( thread.to_markdown(), indoc! {" - ## user + ## User + Hello "} ); @@ -499,9 +1095,12 @@ async fn test_refusal(cx: &mut TestAppContext) { assert_eq!( thread.to_markdown(), indoc! {" - ## user + ## User + Hello - ## assistant + + ## Assistant + Hey! "} ); @@ -517,6 +1116,276 @@ async fn test_refusal(cx: &mut TestAppContext) { }); } +#[gpui::test] +async fn test_truncate_first_message(cx: &mut TestAppContext) { + let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + let message_id = UserMessageId::new(); + thread + .update(cx, |thread, cx| { + thread.send(message_id.clone(), ["Hello"], cx) + }) + .unwrap(); + cx.run_until_parked(); + thread.read_with(cx, |thread, _| { + assert_eq!( + thread.to_markdown(), + indoc! {" + ## User + + Hello + "} + ); + assert_eq!(thread.latest_token_usage(), None); + }); + + fake_model.send_last_completion_stream_text_chunk("Hey!"); + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::UsageUpdate( + language_model::TokenUsage { + input_tokens: 32_000, + output_tokens: 16_000, + cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, + }, + )); + cx.run_until_parked(); + thread.read_with(cx, |thread, _| { + assert_eq!( + thread.to_markdown(), + indoc! {" + ## User + + Hello + + ## Assistant + + Hey! + "} + ); + assert_eq!( + thread.latest_token_usage(), + Some(acp_thread::TokenUsage { + used_tokens: 32_000 + 16_000, + max_tokens: 1_000_000, + }) + ); + }); + + thread + .update(cx, |thread, cx| thread.truncate(message_id, cx)) + .unwrap(); + cx.run_until_parked(); + thread.read_with(cx, |thread, _| { + assert_eq!(thread.to_markdown(), ""); + assert_eq!(thread.latest_token_usage(), None); + }); + + // Ensure we can still send a new message after truncation. + thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Hi"], cx) + }) + .unwrap(); + thread.update(cx, |thread, _cx| { + assert_eq!( + thread.to_markdown(), + indoc! 
{" + ## User + + Hi + "} + ); + }); + cx.run_until_parked(); + fake_model.send_last_completion_stream_text_chunk("Ahoy!"); + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::UsageUpdate( + language_model::TokenUsage { + input_tokens: 40_000, + output_tokens: 20_000, + cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, + }, + )); + cx.run_until_parked(); + thread.read_with(cx, |thread, _| { + assert_eq!( + thread.to_markdown(), + indoc! {" + ## User + + Hi + + ## Assistant + + Ahoy! + "} + ); + + assert_eq!( + thread.latest_token_usage(), + Some(acp_thread::TokenUsage { + used_tokens: 40_000 + 20_000, + max_tokens: 1_000_000, + }) + ); + }); +} + +#[gpui::test] +async fn test_truncate_second_message(cx: &mut TestAppContext) { + let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Message 1"], cx) + }) + .unwrap(); + cx.run_until_parked(); + fake_model.send_last_completion_stream_text_chunk("Message 1 response"); + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::UsageUpdate( + language_model::TokenUsage { + input_tokens: 32_000, + output_tokens: 16_000, + cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, + }, + )); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + let assert_first_message_state = |cx: &mut TestAppContext| { + thread.clone().read_with(cx, |thread, _| { + assert_eq!( + thread.to_markdown(), + indoc! {" + ## User + + Message 1 + + ## Assistant + + Message 1 response + "} + ); + + assert_eq!( + thread.latest_token_usage(), + Some(acp_thread::TokenUsage { + used_tokens: 32_000 + 16_000, + max_tokens: 1_000_000, + }) + ); + }); + }; + + assert_first_message_state(cx); + + let second_message_id = UserMessageId::new(); + thread + .update(cx, |thread, cx| { + thread.send(second_message_id.clone(), ["Message 2"], cx) + }) + .unwrap(); + cx.run_until_parked(); + + fake_model.send_last_completion_stream_text_chunk("Message 2 response"); + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::UsageUpdate( + language_model::TokenUsage { + input_tokens: 40_000, + output_tokens: 20_000, + cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, + }, + )); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + thread.read_with(cx, |thread, _| { + assert_eq!( + thread.to_markdown(), + indoc! {" + ## User + + Message 1 + + ## Assistant + + Message 1 response + + ## User + + Message 2 + + ## Assistant + + Message 2 response + "} + ); + + assert_eq!( + thread.latest_token_usage(), + Some(acp_thread::TokenUsage { + used_tokens: 40_000 + 20_000, + max_tokens: 1_000_000, + }) + ); + }); + + thread + .update(cx, |thread, cx| thread.truncate(second_message_id, cx)) + .unwrap(); + cx.run_until_parked(); + + assert_first_message_state(cx); +} + +#[gpui::test] +async fn test_title_generation(cx: &mut TestAppContext) { + let ThreadTest { model, thread, .. 
} = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + let summary_model = Arc::new(FakeLanguageModel::default()); + thread.update(cx, |thread, cx| { + thread.set_summarization_model(Some(summary_model.clone()), cx) + }); + + let send = thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Hello"], cx) + }) + .unwrap(); + cx.run_until_parked(); + + fake_model.send_last_completion_stream_text_chunk("Hey!"); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + thread.read_with(cx, |thread, _| assert_eq!(thread.title(), "New Thread")); + + // Ensure the summary model has been invoked to generate a title. + summary_model.send_last_completion_stream_text_chunk("Hello "); + summary_model.send_last_completion_stream_text_chunk("world\nG"); + summary_model.send_last_completion_stream_text_chunk("oodnight Moon"); + summary_model.end_last_completion_stream(); + send.collect::>().await; + thread.read_with(cx, |thread, _| assert_eq!(thread.title(), "Hello world")); + + // Send another message, ensuring no title is generated this time. + let send = thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Hello again"], cx) + }) + .unwrap(); + cx.run_until_parked(); + fake_model.send_last_completion_stream_text_chunk("Hey again!"); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + assert_eq!(summary_model.pending_completions(), Vec::new()); + send.collect::>().await; + thread.read_with(cx, |thread, _| assert_eq!(thread.title(), "Hello world")); +} + #[gpui::test] async fn test_agent_connection(cx: &mut TestAppContext) { cx.update(settings::init); @@ -532,22 +1401,32 @@ async fn test_agent_connection(cx: &mut TestAppContext) { let client = Client::new(clock, http_client, cx); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); language_model::init(client.clone(), cx); - language_models::init(user_store.clone(), client.clone(), cx); + language_models::init(user_store, client.clone(), cx); Project::init_settings(cx); LanguageModelRegistry::test(cx); + agent_settings::init(cx); }); cx.executor().forbid_parking(); // Create a project for new_thread let fake_fs = cx.update(|cx| fs::FakeFs::new(cx.background_executor().clone())); fake_fs.insert_tree(path!("/test"), json!({})).await; - let project = Project::test(fake_fs, [Path::new("/test")], cx).await; + let project = Project::test(fake_fs.clone(), [Path::new("/test")], cx).await; let cwd = Path::new("/test"); + let context_store = cx.new(|cx| assistant_context::ContextStore::fake(project.clone(), cx)); + let history_store = cx.new(|cx| HistoryStore::new(context_store, cx)); // Create agent and connection - let agent = NativeAgent::new(project.clone(), templates.clone(), None, &mut cx.to_async()) - .await - .unwrap(); + let agent = NativeAgent::new( + project.clone(), + history_store, + templates.clone(), + None, + fake_fs.clone(), + &mut cx.to_async(), + ) + .await + .unwrap(); let connection = NativeAgentConnection(agent.clone()); // Test model_selector returns Some @@ -560,22 +1439,22 @@ async fn test_agent_connection(cx: &mut TestAppContext) { // Test list_models let listed_models = cx - .update(|cx| { - let mut async_cx = cx.to_async(); - selector.list_models(&mut async_cx) - }) + .update(|cx| selector.list_models(cx)) .await .expect("list_models should succeed"); + let AgentModelList::Grouped(listed_models) = listed_models else { + panic!("Unexpected model list type"); + }; assert!(!listed_models.is_empty(), "should have at least one model"); - 
assert_eq!(listed_models[0].id().0, "fake"); + assert_eq!( + listed_models[&AgentModelGroupName("Fake".into())][0].id.0, + "fake/fake" + ); // Create a thread using new_thread let connection_rc = Rc::new(connection.clone()); let acp_thread = cx - .update(|cx| { - let mut async_cx = cx.to_async(); - connection_rc.new_thread(project, cwd, &mut async_cx) - }) + .update(|cx| connection_rc.new_thread(project, cwd, cx)) .await .expect("new_thread should succeed"); @@ -584,12 +1463,12 @@ async fn test_agent_connection(cx: &mut TestAppContext) { // Test selected_model returns the default let model = cx - .update(|cx| { - let mut async_cx = cx.to_async(); - selector.selected_model(&session_id, &mut async_cx) - }) + .update(|cx| selector.selected_model(&session_id, cx)) .await .expect("selected_model should succeed"); + let model = cx + .update(|cx| agent.read(cx).models().model_from_id(&model.id)) + .unwrap(); let model = model.as_fake(); assert_eq!(model.id().0, "fake", "should return default model"); @@ -623,6 +1502,7 @@ async fn test_agent_connection(cx: &mut TestAppContext) { let result = cx .update(|cx| { connection.prompt( + Some(acp_thread::UserMessageId::new()), acp::PromptRequest { session_id: session_id.clone(), prompt: vec!["ghi".into()], @@ -645,16 +1525,33 @@ async fn test_tool_updates_to_completion(cx: &mut TestAppContext) { thread.update(cx, |thread, _cx| thread.add_tool(ThinkingTool)); let fake_model = model.as_fake(); - let mut events = thread.update(cx, |thread, cx| thread.send(model.clone(), "Think", cx)); + let mut events = thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Think"], cx) + }) + .unwrap(); cx.run_until_parked(); - let input = json!({ "content": "Thinking hard!" }); + // Simulate streaming partial input. + let input = json!({}); fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( LanguageModelToolUse { id: "1".into(), name: ThinkingTool.name().into(), raw_input: input.to_string(), input, + is_input_complete: false, + }, + )); + + // Input streaming completed + let input = json!({ "content": "Thinking hard!" }); + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: "1".into(), + name: "thinking".into(), + raw_input: input.to_string(), + input, is_input_complete: true, }, )); @@ -671,22 +1568,35 @@ async fn test_tool_updates_to_completion(cx: &mut TestAppContext) { status: acp::ToolCallStatus::Pending, content: vec![], locations: vec![], - raw_input: Some(json!({ "content": "Thinking hard!" })), + raw_input: Some(json!({})), raw_output: None, } ); - let update = expect_tool_call_update(&mut events).await; + let update = expect_tool_call_update_fields(&mut events).await; assert_eq!( update, acp::ToolCallUpdate { id: acp::ToolCallId("1".into()), fields: acp::ToolCallUpdateFields { - status: Some(acp::ToolCallStatus::InProgress,), + title: Some("Thinking".into()), + kind: Some(acp::ToolKind::Think), + raw_input: Some(json!({ "content": "Thinking hard!" 
})), ..Default::default() }, } ); - let update = expect_tool_call_update(&mut events).await; + let update = expect_tool_call_update_fields(&mut events).await; + assert_eq!( + update, + acp::ToolCallUpdate { + id: acp::ToolCallId("1".into()), + fields: acp::ToolCallUpdateFields { + status: Some(acp::ToolCallStatus::InProgress), + ..Default::default() + }, + } + ); + let update = expect_tool_call_update_fields(&mut events).await; assert_eq!( update, acp::ToolCallUpdate { @@ -697,27 +1607,182 @@ async fn test_tool_updates_to_completion(cx: &mut TestAppContext) { }, } ); - let update = expect_tool_call_update(&mut events).await; + let update = expect_tool_call_update_fields(&mut events).await; assert_eq!( update, acp::ToolCallUpdate { id: acp::ToolCallId("1".into()), fields: acp::ToolCallUpdateFields { status: Some(acp::ToolCallStatus::Completed), + raw_output: Some("Finished thinking.".into()), ..Default::default() }, } ); } +#[gpui::test] +async fn test_send_no_retry_on_success(cx: &mut TestAppContext) { + let ThreadTest { thread, model, .. } = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + let mut events = thread + .update(cx, |thread, cx| { + thread.set_completion_mode(agent_settings::CompletionMode::Burn, cx); + thread.send(UserMessageId::new(), ["Hello!"], cx) + }) + .unwrap(); + cx.run_until_parked(); + + fake_model.send_last_completion_stream_text_chunk("Hey!"); + fake_model.end_last_completion_stream(); + + let mut retry_events = Vec::new(); + while let Some(Ok(event)) = events.next().await { + match event { + ThreadEvent::Retry(retry_status) => { + retry_events.push(retry_status); + } + ThreadEvent::Stop(..) => break, + _ => {} + } + } + + assert_eq!(retry_events.len(), 0); + thread.read_with(cx, |thread, _cx| { + assert_eq!( + thread.to_markdown(), + indoc! {" + ## User + + Hello! + + ## Assistant + + Hey! + "} + ) + }); +} + +#[gpui::test] +async fn test_send_retry_on_error(cx: &mut TestAppContext) { + let ThreadTest { thread, model, .. } = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + let mut events = thread + .update(cx, |thread, cx| { + thread.set_completion_mode(agent_settings::CompletionMode::Burn, cx); + thread.send(UserMessageId::new(), ["Hello!"], cx) + }) + .unwrap(); + cx.run_until_parked(); + + fake_model.send_last_completion_stream_error(LanguageModelCompletionError::ServerOverloaded { + provider: LanguageModelProviderName::new("Anthropic"), + retry_after: Some(Duration::from_secs(3)), + }); + fake_model.end_last_completion_stream(); + + cx.executor().advance_clock(Duration::from_secs(3)); + cx.run_until_parked(); + + fake_model.send_last_completion_stream_text_chunk("Hey!"); + fake_model.end_last_completion_stream(); + + let mut retry_events = Vec::new(); + while let Some(Ok(event)) = events.next().await { + match event { + ThreadEvent::Retry(retry_status) => { + retry_events.push(retry_status); + } + ThreadEvent::Stop(..) => break, + _ => {} + } + } + + assert_eq!(retry_events.len(), 1); + assert!(matches!( + retry_events[0], + acp_thread::RetryStatus { attempt: 1, .. } + )); + thread.read_with(cx, |thread, _cx| { + assert_eq!( + thread.to_markdown(), + indoc! {" + ## User + + Hello! + + ## Assistant + + Hey! + "} + ) + }); +} + +#[gpui::test] +async fn test_send_max_retries_exceeded(cx: &mut TestAppContext) { + let ThreadTest { thread, model, .. 
} = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + let mut events = thread + .update(cx, |thread, cx| { + thread.set_completion_mode(agent_settings::CompletionMode::Burn, cx); + thread.send(UserMessageId::new(), ["Hello!"], cx) + }) + .unwrap(); + cx.run_until_parked(); + + for _ in 0..crate::thread::MAX_RETRY_ATTEMPTS + 1 { + fake_model.send_last_completion_stream_error( + LanguageModelCompletionError::ServerOverloaded { + provider: LanguageModelProviderName::new("Anthropic"), + retry_after: Some(Duration::from_secs(3)), + }, + ); + fake_model.end_last_completion_stream(); + cx.executor().advance_clock(Duration::from_secs(3)); + cx.run_until_parked(); + } + + let mut errors = Vec::new(); + let mut retry_events = Vec::new(); + while let Some(event) = events.next().await { + match event { + Ok(ThreadEvent::Retry(retry_status)) => { + retry_events.push(retry_status); + } + Ok(ThreadEvent::Stop(..)) => break, + Err(error) => errors.push(error), + _ => {} + } + } + + assert_eq!( + retry_events.len(), + crate::thread::MAX_RETRY_ATTEMPTS as usize + ); + for i in 0..crate::thread::MAX_RETRY_ATTEMPTS as usize { + assert_eq!(retry_events[i].attempt, i + 1); + } + assert_eq!(errors.len(), 1); + let error = errors[0] + .downcast_ref::() + .unwrap(); + assert!(matches!( + error, + LanguageModelCompletionError::ServerOverloaded { .. } + )); +} + /// Filters out the stop events for asserting against in tests -fn stop_events( - result_events: Vec>, -) -> Vec { +fn stop_events(result_events: Vec>) -> Vec { result_events .into_iter() .filter_map(|event| match event.unwrap() { - AgentResponseEvent::Stop(stop_reason) => Some(stop_reason), + ThreadEvent::Stop(stop_reason) => Some(stop_reason), _ => None, }) .collect() @@ -726,7 +1791,8 @@ fn stop_events( struct ThreadTest { model: Arc, thread: Entity, - project_context: Rc>, + project_context: Entity, + fs: Arc, } enum TestModel { @@ -747,28 +1813,59 @@ impl TestModel { async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest { cx.executor().allow_parking(); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.create_dir(paths::settings_file().parent().unwrap()) + .await + .unwrap(); + fs.insert_file( + paths::settings_file(), + json!({ + "agent": { + "default_profile": "test-profile", + "profiles": { + "test-profile": { + "name": "Test Profile", + "tools": { + EchoTool.name(): true, + DelayTool.name(): true, + WordListTool.name(): true, + ToolRequiringPermission.name(): true, + InfiniteTool.name(): true, + } + } + } + } + }) + .to_string() + .into_bytes(), + ) + .await; + cx.update(|cx| { settings::init(cx); Project::init_settings(cx); + agent_settings::init(cx); + gpui_tokio::init(cx); + let http_client = ReqwestClient::user_agent("agent tests").unwrap(); + cx.set_http_client(Arc::new(http_client)); + + client::init_settings(cx); + let client = Client::production(cx); + let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); + language_model::init(client.clone(), cx); + language_models::init(user_store, client.clone(), cx); + + watch_settings(fs.clone(), cx); }); + let templates = Templates::new(); - let fs = FakeFs::new(cx.background_executor.clone()); fs.insert_tree(path!("/test"), json!({})).await; - let project = Project::test(fs, [path!("/test").as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await; let model = cx .update(|cx| { - gpui_tokio::init(cx); - let http_client = ReqwestClient::user_agent("agent tests").unwrap(); - 
cx.set_http_client(Arc::new(http_client)); - - client::init_settings(cx); - let client = Client::production(cx); - let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); - language_model::init(client.clone(), cx); - language_models::init(user_store.clone(), client.clone(), cx); - if let TestModel::Fake = model { Task::ready(Arc::new(FakeLanguageModel::default()) as Arc<_>) } else { @@ -790,21 +1887,26 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest { }) .await; - let project_context = Rc::new(RefCell::new(ProjectContext::default())); + let project_context = cx.new(|_cx| ProjectContext::default()); + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); let action_log = cx.new(|_| ActionLog::new(project.clone())); - let thread = cx.new(|_| { + let thread = cx.new(|cx| { Thread::new( project, project_context.clone(), + context_server_registry, action_log, templates, - model.clone(), + Some(model.clone()), + cx, ) }); ThreadTest { model, thread, project_context, + fs, } } @@ -815,3 +1917,26 @@ fn init_logger() { env_logger::init(); } } + +fn watch_settings(fs: Arc, cx: &mut App) { + let fs = fs.clone(); + cx.spawn({ + async move |cx| { + let mut new_settings_content_rx = settings::watch_config_file( + cx.background_executor(), + fs, + paths::settings_file().clone(), + ); + + while let Some(new_settings_content) = new_settings_content_rx.next().await { + cx.update(|cx| { + SettingsStore::update_global(cx, |settings, cx| { + settings.set_user_settings(&new_settings_content, cx) + }) + }) + .ok(); + } + } + }) + .detach(); +} diff --git a/crates/agent2/src/tests/test_tools.rs b/crates/agent2/src/tests/test_tools.rs index fd6e7e941fdb0e36e3c5a5c4ad56ebaed0a9acdf..cbff44cedfc28a0d24ea4fa12e3ac71c9135c0d8 100644 --- a/crates/agent2/src/tests/test_tools.rs +++ b/crates/agent2/src/tests/test_tools.rs @@ -7,13 +7,14 @@ use std::future; #[derive(JsonSchema, Serialize, Deserialize)] pub struct EchoToolInput { /// The text to echo. 
-    text: String,
+    pub text: String,
 }
 
 pub struct EchoTool;
 
 impl AgentTool for EchoTool {
     type Input = EchoToolInput;
+    type Output = String;
 
     fn name(&self) -> SharedString {
         "echo".into()
@@ -23,7 +24,7 @@ impl AgentTool for EchoTool {
         acp::ToolKind::Other
     }
 
-    fn initial_title(&self, _: Self::Input) -> SharedString {
+    fn initial_title(&self, _input: Result) -> SharedString {
         "Echo".into()
     }
 
@@ -48,13 +49,18 @@ pub struct DelayTool;
 
 impl AgentTool for DelayTool {
     type Input = DelayToolInput;
+    type Output = String;
 
     fn name(&self) -> SharedString {
         "delay".into()
     }
 
-    fn initial_title(&self, input: Self::Input) -> SharedString {
-        format!("Delay {}ms", input.ms).into()
+    fn initial_title(&self, input: Result) -> SharedString {
+        if let Ok(input) = input {
+            format!("Delay {}ms", input.ms).into()
+        } else {
+            "Delay".into()
+        }
     }
 
     fn kind(&self) -> acp::ToolKind {
@@ -84,6 +90,7 @@ pub struct ToolRequiringPermission;
 
 impl AgentTool for ToolRequiringPermission {
     type Input = ToolRequiringPermissionInput;
+    type Output = String;
 
     fn name(&self) -> SharedString {
         "tool_requiring_permission".into()
@@ -93,22 +100,19 @@ impl AgentTool for ToolRequiringPermission {
         acp::ToolKind::Other
     }
 
-    fn initial_title(&self, _input: Self::Input) -> SharedString {
+    fn initial_title(&self, _input: Result) -> SharedString {
         "This tool requires permission".into()
     }
 
     fn run(
         self: Arc,
-        input: Self::Input,
+        _input: Self::Input,
         event_stream: ToolCallEventStream,
         cx: &mut App,
-    ) -> Task>
-    where
-        Self: Sized,
-    {
-        let auth_check = self.authorize(input, event_stream);
+    ) -> Task> {
+        let authorize = event_stream.authorize("Authorize?", cx);
         cx.foreground_executor().spawn(async move {
-            auth_check.await?;
+            authorize.await?;
             Ok("Allowed".to_string())
         })
     }
@@ -121,6 +125,7 @@ pub struct InfiniteTool;
 
 impl AgentTool for InfiniteTool {
     type Input = InfiniteToolInput;
+    type Output = String;
 
     fn name(&self) -> SharedString {
         "infinite".into()
@@ -130,8 +135,8 @@ impl AgentTool for InfiniteTool {
         acp::ToolKind::Other
     }
 
-    fn initial_title(&self, _input: Self::Input) -> SharedString {
-        "This is the tool that never ends... 
it just goes on and on my friends!".into() + fn initial_title(&self, _input: Result) -> SharedString { + "Infinite Tool".into() } fn run( @@ -171,19 +176,20 @@ pub struct WordListTool; impl AgentTool for WordListTool { type Input = WordListInput; + type Output = String; fn name(&self) -> SharedString { "word_list".into() } - fn initial_title(&self, _input: Self::Input) -> SharedString { - "List of random words".into() - } - fn kind(&self) -> acp::ToolKind { acp::ToolKind::Other } + fn initial_title(&self, _input: Result) -> SharedString { + "List of random words".into() + } + fn run( self: Arc, _input: Self::Input, diff --git a/crates/agent2/src/thread.rs b/crates/agent2/src/thread.rs index 4b8a65655f60b13cd61c81261fb9194b2a61c775..d34c92915228753f6edfe5d90a81428e27f5a562 100644 --- a/crates/agent2/src/thread.rs +++ b/crates/agent2/src/thread.rs @@ -1,55 +1,369 @@ -use crate::templates::{SystemPromptTemplate, Template, Templates}; +use crate::{ + ContextServerRegistry, CopyPathTool, CreateDirectoryTool, DbLanguageModel, DbThread, + DeletePathTool, DiagnosticsTool, EditFileTool, FetchTool, FindPathTool, GrepTool, + ListDirectoryTool, MovePathTool, NowTool, OpenTool, ReadFileTool, SystemPromptTemplate, + Template, Templates, TerminalTool, ThinkingTool, WebSearchTool, +}; +use acp_thread::{MentionUri, UserMessageId}; +use action_log::ActionLog; +use agent::thread::{GitState, ProjectSnapshot, WorktreeSnapshot}; use agent_client_protocol as acp; -use anyhow::{anyhow, Context as _, Result}; -use assistant_tool::{adapt_schema_to_format, ActionLog}; -use cloud_llm_client::{CompletionIntent, CompletionMode}; -use collections::HashMap; +use agent_settings::{ + AgentProfileId, AgentSettings, CompletionMode, SUMMARIZE_THREAD_DETAILED_PROMPT, + SUMMARIZE_THREAD_PROMPT, +}; +use anyhow::{Context as _, Result, anyhow}; +use assistant_tool::adapt_schema_to_format; +use chrono::{DateTime, Utc}; +use client::{ModelRequestUsage, RequestUsage}; +use cloud_llm_client::{CompletionIntent, CompletionRequestStatus, UsageLimit}; +use collections::{HashMap, IndexMap}; +use fs::Fs; use futures::{ + FutureExt, channel::{mpsc, oneshot}, + future::Shared, stream::FuturesUnordered, }; -use gpui::{App, Context, Entity, SharedString, Task}; +use git::repository::DiffType; +use gpui::{ + App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task, WeakEntity, +}; use language_model::{ - LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, - LanguageModelRequest, LanguageModelRequestMessage, LanguageModelRequestTool, - LanguageModelToolResult, LanguageModelToolResultContent, LanguageModelToolSchemaFormat, - LanguageModelToolUse, LanguageModelToolUseId, MessageContent, Role, StopReason, + LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelExt, + LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry, LanguageModelRequest, + LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult, + LanguageModelToolResultContent, LanguageModelToolSchemaFormat, LanguageModelToolUse, + LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage, +}; +use project::{ + Project, + git_store::{GitStore, RepositoryState}, }; -use log; -use project::Project; use prompt_store::ProjectContext; use schemars::{JsonSchema, Schema}; use serde::{Deserialize, Serialize}; +use settings::{Settings, update_settings_file}; use smol::stream::StreamExt; -use std::{cell::RefCell, collections::BTreeMap, fmt::Write, future::Future, rc::Rc, 
sync::Arc}; -use util::{markdown::MarkdownCodeBlock, ResultExt}; +use std::{ + collections::BTreeMap, + path::Path, + sync::Arc, + time::{Duration, Instant}, +}; +use std::{fmt::Write, ops::Range}; +use util::{ResultExt, markdown::MarkdownCodeBlock}; +use uuid::Uuid; + +const TOOL_CANCELED_MESSAGE: &str = "Tool canceled by user"; + +/// The ID of the user prompt that initiated a request. +/// +/// This equates to the user physically submitting a message to the model (e.g., by pressing the Enter key). +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Serialize, Deserialize)] +pub struct PromptId(Arc); + +impl PromptId { + pub fn new() -> Self { + Self(Uuid::new_v4().to_string().into()) + } +} + +impl std::fmt::Display for PromptId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + +pub(crate) const MAX_RETRY_ATTEMPTS: u8 = 4; +pub(crate) const BASE_RETRY_DELAY: Duration = Duration::from_secs(5); #[derive(Debug, Clone)] -pub struct AgentMessage { - pub role: Role, - pub content: Vec, +enum RetryStrategy { + ExponentialBackoff { + initial_delay: Duration, + max_attempts: u8, + }, + Fixed { + delay: Duration, + max_attempts: u8, + }, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub enum Message { + User(UserMessage), + Agent(AgentMessage), + Resume, +} + +impl Message { + pub fn as_agent_message(&self) -> Option<&AgentMessage> { + match self { + Message::Agent(agent_message) => Some(agent_message), + _ => None, + } + } + + pub fn to_request(&self) -> Vec { + match self { + Message::User(message) => vec![message.to_request()], + Message::Agent(message) => message.to_request(), + Message::Resume => vec![LanguageModelRequestMessage { + role: Role::User, + content: vec!["Continue where you left off".into()], + cache: false, + }], + } + } + + pub fn to_markdown(&self) -> String { + match self { + Message::User(message) => message.to_markdown(), + Message::Agent(message) => message.to_markdown(), + Message::Resume => "[resumed after tool use limit was reached]".into(), + } + } + + pub fn role(&self) -> Role { + match self { + Message::User(_) | Message::Resume => Role::User, + Message::Agent(_) => Role::Assistant, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct UserMessage { + pub id: UserMessageId, + pub content: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub enum UserMessageContent { + Text(String), + Mention { uri: MentionUri, content: String }, + Image(LanguageModelImage), +} + +impl UserMessage { + pub fn to_markdown(&self) -> String { + let mut markdown = String::from("## User\n\n"); + + for content in &self.content { + match content { + UserMessageContent::Text(text) => { + markdown.push_str(text); + markdown.push('\n'); + } + UserMessageContent::Image(_) => { + markdown.push_str("\n"); + } + UserMessageContent::Mention { uri, content } => { + if !content.is_empty() { + let _ = writeln!(&mut markdown, "{}\n\n{}", uri.as_link(), content); + } else { + let _ = writeln!(&mut markdown, "{}", uri.as_link()); + } + } + } + } + + markdown + } + + fn to_request(&self) -> LanguageModelRequestMessage { + let mut message = LanguageModelRequestMessage { + role: Role::User, + content: Vec::with_capacity(self.content.len()), + cache: false, + }; + + const OPEN_CONTEXT: &str = "\n\ + The following items were attached by the user. 
\ + They are up-to-date and don't need to be re-read.\n\n"; + + const OPEN_FILES_TAG: &str = ""; + const OPEN_DIRECTORIES_TAG: &str = ""; + const OPEN_SYMBOLS_TAG: &str = ""; + const OPEN_THREADS_TAG: &str = ""; + const OPEN_FETCH_TAG: &str = ""; + const OPEN_RULES_TAG: &str = + "\nThe user has specified the following rules that should be applied:\n"; + + let mut file_context = OPEN_FILES_TAG.to_string(); + let mut directory_context = OPEN_DIRECTORIES_TAG.to_string(); + let mut symbol_context = OPEN_SYMBOLS_TAG.to_string(); + let mut thread_context = OPEN_THREADS_TAG.to_string(); + let mut fetch_context = OPEN_FETCH_TAG.to_string(); + let mut rules_context = OPEN_RULES_TAG.to_string(); + + for chunk in &self.content { + let chunk = match chunk { + UserMessageContent::Text(text) => { + language_model::MessageContent::Text(text.clone()) + } + UserMessageContent::Image(value) => { + language_model::MessageContent::Image(value.clone()) + } + UserMessageContent::Mention { uri, content } => { + match uri { + MentionUri::File { abs_path } => { + write!( + &mut symbol_context, + "\n{}", + MarkdownCodeBlock { + tag: &codeblock_tag(abs_path, None), + text: &content.to_string(), + } + ) + .ok(); + } + MentionUri::Directory { .. } => { + write!(&mut directory_context, "\n{}\n", content).ok(); + } + MentionUri::Symbol { + path, line_range, .. + } + | MentionUri::Selection { + path, line_range, .. + } => { + write!( + &mut rules_context, + "\n{}", + MarkdownCodeBlock { + tag: &codeblock_tag(path, Some(line_range)), + text: content + } + ) + .ok(); + } + MentionUri::Thread { .. } => { + write!(&mut thread_context, "\n{}\n", content).ok(); + } + MentionUri::TextThread { .. } => { + write!(&mut thread_context, "\n{}\n", content).ok(); + } + MentionUri::Rule { .. } => { + write!( + &mut rules_context, + "\n{}", + MarkdownCodeBlock { + tag: "", + text: content + } + ) + .ok(); + } + MentionUri::Fetch { url } => { + write!(&mut fetch_context, "\nFetch: {}\n\n{}", url, content).ok(); + } + } + + language_model::MessageContent::Text(uri.as_link().to_string()) + } + }; + + message.content.push(chunk); + } + + let len_before_context = message.content.len(); + + if file_context.len() > OPEN_FILES_TAG.len() { + file_context.push_str("\n"); + message + .content + .push(language_model::MessageContent::Text(file_context)); + } + + if directory_context.len() > OPEN_DIRECTORIES_TAG.len() { + directory_context.push_str("\n"); + message + .content + .push(language_model::MessageContent::Text(directory_context)); + } + + if symbol_context.len() > OPEN_SYMBOLS_TAG.len() { + symbol_context.push_str("\n"); + message + .content + .push(language_model::MessageContent::Text(symbol_context)); + } + + if thread_context.len() > OPEN_THREADS_TAG.len() { + thread_context.push_str("\n"); + message + .content + .push(language_model::MessageContent::Text(thread_context)); + } + + if fetch_context.len() > OPEN_FETCH_TAG.len() { + fetch_context.push_str("\n"); + message + .content + .push(language_model::MessageContent::Text(fetch_context)); + } + + if rules_context.len() > OPEN_RULES_TAG.len() { + rules_context.push_str("\n"); + message + .content + .push(language_model::MessageContent::Text(rules_context)); + } + + if message.content.len() > len_before_context { + message.content.insert( + len_before_context, + language_model::MessageContent::Text(OPEN_CONTEXT.into()), + ); + message + .content + .push(language_model::MessageContent::Text("".into())); + } + + message + } +} + +fn codeblock_tag(full_path: &Path, line_range: 
Option<&Range>) -> String { + let mut result = String::new(); + + if let Some(extension) = full_path.extension().and_then(|ext| ext.to_str()) { + let _ = write!(result, "{} ", extension); + } + + let _ = write!(result, "{}", full_path.display()); + + if let Some(range) = line_range { + if range.start == range.end { + let _ = write!(result, ":{}", range.start + 1); + } else { + let _ = write!(result, ":{}-{}", range.start + 1, range.end + 1); + } + } + + result } impl AgentMessage { pub fn to_markdown(&self) -> String { - let mut markdown = format!("## {}\n", self.role); + let mut markdown = String::from("## Assistant\n\n"); for content in &self.content { match content { - MessageContent::Text(text) => { + AgentMessageContent::Text(text) => { markdown.push_str(text); markdown.push('\n'); } - MessageContent::Thinking { text, .. } => { + AgentMessageContent::Thinking { text, .. } => { markdown.push_str(""); markdown.push_str(text); markdown.push_str("\n"); } - MessageContent::RedactedThinking(_) => markdown.push_str("\n"), - MessageContent::Image(_) => { - markdown.push_str("\n"); + AgentMessageContent::RedactedThinking(_) => { + markdown.push_str("\n") } - MessageContent::ToolUse(tool_use) => { + AgentMessageContent::ToolUse(tool_use) => { markdown.push_str(&format!( "**Tool Use**: {} (ID: {})\n", tool_use.name, tool_use.id @@ -62,99 +376,578 @@ impl AgentMessage { } )); } - MessageContent::ToolResult(tool_result) => { - markdown.push_str(&format!( - "**Tool Result**: {} (ID: {})\n\n", - tool_result.tool_name, tool_result.tool_use_id - )); - if tool_result.is_error { - markdown.push_str("**ERROR:**\n"); - } + } + } - match &tool_result.content { - LanguageModelToolResultContent::Text(text) => { - writeln!(markdown, "{text}\n").ok(); - } - LanguageModelToolResultContent::Image(_) => { - writeln!(markdown, "\n").ok(); - } - } + for tool_result in self.tool_results.values() { + markdown.push_str(&format!( + "**Tool Result**: {} (ID: {})\n\n", + tool_result.tool_name, tool_result.tool_use_id + )); + if tool_result.is_error { + markdown.push_str("**ERROR:**\n"); + } - if let Some(output) = tool_result.output.as_ref() { - writeln!( - markdown, - "**Debug Output**:\n\n```json\n{}\n```\n", - serde_json::to_string_pretty(output).unwrap() - ) - .unwrap(); - } + match &tool_result.content { + LanguageModelToolResultContent::Text(text) => { + writeln!(markdown, "{text}\n").ok(); + } + LanguageModelToolResultContent::Image(_) => { + writeln!(markdown, "\n").ok(); } } + + if let Some(output) = tool_result.output.as_ref() { + writeln!( + markdown, + "**Debug Output**:\n\n```json\n{}\n```\n", + serde_json::to_string_pretty(output).unwrap() + ) + .unwrap(); + } } markdown } + + pub fn to_request(&self) -> Vec { + let mut assistant_message = LanguageModelRequestMessage { + role: Role::Assistant, + content: Vec::with_capacity(self.content.len()), + cache: false, + }; + for chunk in &self.content { + let chunk = match chunk { + AgentMessageContent::Text(text) => { + language_model::MessageContent::Text(text.clone()) + } + AgentMessageContent::Thinking { text, signature } => { + language_model::MessageContent::Thinking { + text: text.clone(), + signature: signature.clone(), + } + } + AgentMessageContent::RedactedThinking(value) => { + language_model::MessageContent::RedactedThinking(value.clone()) + } + AgentMessageContent::ToolUse(value) => { + language_model::MessageContent::ToolUse(value.clone()) + } + }; + assistant_message.content.push(chunk); + } + + let mut user_message = LanguageModelRequestMessage { + 
role: Role::User, + content: Vec::new(), + cache: false, + }; + + for tool_result in self.tool_results.values() { + user_message + .content + .push(language_model::MessageContent::ToolResult( + tool_result.clone(), + )); + } + + let mut messages = Vec::new(); + if !assistant_message.content.is_empty() { + messages.push(assistant_message); + } + if !user_message.content.is_empty() { + messages.push(user_message); + } + messages + } } -#[derive(Debug)] -pub enum AgentResponseEvent { +#[derive(Default, Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct AgentMessage { + pub content: Vec, + pub tool_results: IndexMap, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub enum AgentMessageContent { Text(String), - Thinking(String), + Thinking { + text: String, + signature: Option, + }, + RedactedThinking(String), + ToolUse(LanguageModelToolUse), +} + +#[derive(Debug)] +pub enum ThreadEvent { + UserMessage(UserMessage), + AgentText(String), + AgentThinking(String), ToolCall(acp::ToolCall), - ToolCallUpdate(acp::ToolCallUpdate), + ToolCallUpdate(acp_thread::ToolCallUpdate), ToolCallAuthorization(ToolCallAuthorization), + TitleUpdate(SharedString), + Retry(acp_thread::RetryStatus), Stop(acp::StopReason), } #[derive(Debug)] pub struct ToolCallAuthorization { - pub tool_call: acp::ToolCall, + pub tool_call: acp::ToolCallUpdate, pub options: Vec, pub response: oneshot::Sender, } pub struct Thread { - messages: Vec, + id: acp::SessionId, + prompt_id: PromptId, + updated_at: DateTime, + title: Option, + summary: Option, + messages: Vec, completion_mode: CompletionMode, /// Holds the task that handles agent interaction until the end of the turn. /// Survives across multiple requests as the model performs tool calls and /// we run tools, report their results. 
- running_turn: Option>, - pending_tool_uses: HashMap, + running_turn: Option, + pending_message: Option, tools: BTreeMap>, - project_context: Rc>, + tool_use_limit_reached: bool, + request_token_usage: HashMap, + #[allow(unused)] + cumulative_token_usage: TokenUsage, + #[allow(unused)] + initial_project_snapshot: Shared>>>, + context_server_registry: Entity, + profile_id: AgentProfileId, + project_context: Entity, templates: Arc, - pub selected_model: Arc, - action_log: Entity, + model: Option>, + summarization_model: Option>, + pub(crate) project: Entity, + pub(crate) action_log: Entity, } impl Thread { pub fn new( - _project: Entity, - project_context: Rc>, + project: Entity, + project_context: Entity, + context_server_registry: Entity, action_log: Entity, templates: Arc, - default_model: Arc, + model: Option>, + cx: &mut Context, ) -> Self { + let profile_id = AgentSettings::get_global(cx).default_profile.clone(); Self { + id: acp::SessionId(uuid::Uuid::new_v4().to_string().into()), + prompt_id: PromptId::new(), + updated_at: Utc::now(), + title: None, + summary: None, messages: Vec::new(), - completion_mode: CompletionMode::Normal, + completion_mode: AgentSettings::get_global(cx).preferred_completion_mode, running_turn: None, - pending_tool_uses: HashMap::default(), + pending_message: None, tools: BTreeMap::default(), + tool_use_limit_reached: false, + request_token_usage: HashMap::default(), + cumulative_token_usage: TokenUsage::default(), + initial_project_snapshot: { + let project_snapshot = Self::project_snapshot(project.clone(), cx); + cx.foreground_executor() + .spawn(async move { Some(project_snapshot.await) }) + .shared() + }, + context_server_registry, + profile_id, project_context, templates, - selected_model: default_model, + model, + summarization_model: None, + project, action_log, } } - pub fn set_mode(&mut self, mode: CompletionMode) { + pub fn id(&self) -> &acp::SessionId { + &self.id + } + + pub fn replay( + &mut self, + cx: &mut Context, + ) -> mpsc::UnboundedReceiver> { + let (tx, rx) = mpsc::unbounded(); + let stream = ThreadEventStream(tx); + for message in &self.messages { + match message { + Message::User(user_message) => stream.send_user_message(user_message), + Message::Agent(assistant_message) => { + for content in &assistant_message.content { + match content { + AgentMessageContent::Text(text) => stream.send_text(text), + AgentMessageContent::Thinking { text, .. 
} => { + stream.send_thinking(text) + } + AgentMessageContent::RedactedThinking(_) => {} + AgentMessageContent::ToolUse(tool_use) => { + self.replay_tool_call( + tool_use, + assistant_message.tool_results.get(&tool_use.id), + &stream, + cx, + ); + } + } + } + } + Message::Resume => {} + } + } + rx + } + + fn replay_tool_call( + &self, + tool_use: &LanguageModelToolUse, + tool_result: Option<&LanguageModelToolResult>, + stream: &ThreadEventStream, + cx: &mut Context, + ) { + let Some(tool) = self.tools.get(tool_use.name.as_ref()) else { + stream + .0 + .unbounded_send(Ok(ThreadEvent::ToolCall(acp::ToolCall { + id: acp::ToolCallId(tool_use.id.to_string().into()), + title: tool_use.name.to_string(), + kind: acp::ToolKind::Other, + status: acp::ToolCallStatus::Failed, + content: Vec::new(), + locations: Vec::new(), + raw_input: Some(tool_use.input.clone()), + raw_output: None, + }))) + .ok(); + return; + }; + + let title = tool.initial_title(tool_use.input.clone()); + let kind = tool.kind(); + stream.send_tool_call(&tool_use.id, title, kind, tool_use.input.clone()); + + let output = tool_result + .as_ref() + .and_then(|result| result.output.clone()); + if let Some(output) = output.clone() { + let tool_event_stream = ToolCallEventStream::new( + tool_use.id.clone(), + stream.clone(), + Some(self.project.read(cx).fs().clone()), + ); + tool.replay(tool_use.input.clone(), output, tool_event_stream, cx) + .log_err(); + } + + stream.update_tool_call_fields( + &tool_use.id, + acp::ToolCallUpdateFields { + status: Some(acp::ToolCallStatus::Completed), + raw_output: output, + ..Default::default() + }, + ); + } + + pub fn from_db( + id: acp::SessionId, + db_thread: DbThread, + project: Entity, + project_context: Entity, + context_server_registry: Entity, + action_log: Entity, + templates: Arc, + cx: &mut Context, + ) -> Self { + let profile_id = db_thread + .profile + .unwrap_or_else(|| AgentSettings::get_global(cx).default_profile.clone()); + let model = LanguageModelRegistry::global(cx).update(cx, |registry, cx| { + db_thread + .model + .and_then(|model| { + let model = SelectedModel { + provider: model.provider.clone().into(), + model: model.model.into(), + }; + registry.select_model(&model, cx) + }) + .or_else(|| registry.default_model()) + .map(|model| model.model) + }); + + Self { + id, + prompt_id: PromptId::new(), + title: if db_thread.title.is_empty() { + None + } else { + Some(db_thread.title.clone()) + }, + summary: db_thread.detailed_summary, + messages: db_thread.messages, + completion_mode: db_thread.completion_mode.unwrap_or_default(), + running_turn: None, + pending_message: None, + tools: BTreeMap::default(), + tool_use_limit_reached: false, + request_token_usage: db_thread.request_token_usage.clone(), + cumulative_token_usage: db_thread.cumulative_token_usage, + initial_project_snapshot: Task::ready(db_thread.initial_project_snapshot).shared(), + context_server_registry, + profile_id, + project_context, + templates, + model, + summarization_model: None, + project, + action_log, + updated_at: db_thread.updated_at, + } + } + + pub fn to_db(&self, cx: &App) -> Task { + let initial_project_snapshot = self.initial_project_snapshot.clone(); + let mut thread = DbThread { + title: self.title(), + messages: self.messages.clone(), + updated_at: self.updated_at, + detailed_summary: self.summary.clone(), + initial_project_snapshot: None, + cumulative_token_usage: self.cumulative_token_usage, + request_token_usage: self.request_token_usage.clone(), + model: self.model.as_ref().map(|model| 
DbLanguageModel { + provider: model.provider_id().to_string(), + model: model.name().0.to_string(), + }), + completion_mode: Some(self.completion_mode), + profile: Some(self.profile_id.clone()), + }; + + cx.background_spawn(async move { + let initial_project_snapshot = initial_project_snapshot.await; + thread.initial_project_snapshot = initial_project_snapshot; + thread + }) + } + + /// Create a snapshot of the current project state including git information and unsaved buffers. + fn project_snapshot( + project: Entity, + cx: &mut Context, + ) -> Task> { + let git_store = project.read(cx).git_store().clone(); + let worktree_snapshots: Vec<_> = project + .read(cx) + .visible_worktrees(cx) + .map(|worktree| Self::worktree_snapshot(worktree, git_store.clone(), cx)) + .collect(); + + cx.spawn(async move |_, cx| { + let worktree_snapshots = futures::future::join_all(worktree_snapshots).await; + + let mut unsaved_buffers = Vec::new(); + cx.update(|app_cx| { + let buffer_store = project.read(app_cx).buffer_store(); + for buffer_handle in buffer_store.read(app_cx).buffers() { + let buffer = buffer_handle.read(app_cx); + if buffer.is_dirty() + && let Some(file) = buffer.file() + { + let path = file.path().to_string_lossy().to_string(); + unsaved_buffers.push(path); + } + } + }) + .ok(); + + Arc::new(ProjectSnapshot { + worktree_snapshots, + unsaved_buffer_paths: unsaved_buffers, + timestamp: Utc::now(), + }) + }) + } + + fn worktree_snapshot( + worktree: Entity, + git_store: Entity, + cx: &App, + ) -> Task { + cx.spawn(async move |cx| { + // Get worktree path and snapshot + let worktree_info = cx.update(|app_cx| { + let worktree = worktree.read(app_cx); + let path = worktree.abs_path().to_string_lossy().to_string(); + let snapshot = worktree.snapshot(); + (path, snapshot) + }); + + let Ok((worktree_path, _snapshot)) = worktree_info else { + return WorktreeSnapshot { + worktree_path: String::new(), + git_state: None, + }; + }; + + let git_state = git_store + .update(cx, |git_store, cx| { + git_store + .repositories() + .values() + .find(|repo| { + repo.read(cx) + .abs_path_to_repo_path(&worktree.read(cx).abs_path()) + .is_some() + }) + .cloned() + }) + .ok() + .flatten() + .map(|repo| { + repo.update(cx, |repo, _| { + let current_branch = + repo.branch.as_ref().map(|branch| branch.name().to_owned()); + repo.send_job(None, |state, _| async move { + let RepositoryState::Local { backend, .. 
} = state else { + return GitState { + remote_url: None, + head_sha: None, + current_branch, + diff: None, + }; + }; + + let remote_url = backend.remote_url("origin"); + let head_sha = backend.head_sha().await; + let diff = backend.diff(DiffType::HeadToWorktree).await.ok(); + + GitState { + remote_url, + head_sha, + current_branch, + diff, + } + }) + }) + }); + + let git_state = match git_state { + Some(git_state) => match git_state.ok() { + Some(git_state) => git_state.await.ok(), + None => None, + }, + None => None, + }; + + WorktreeSnapshot { + worktree_path, + git_state, + } + }) + } + + pub fn project_context(&self) -> &Entity { + &self.project_context + } + + pub fn project(&self) -> &Entity { + &self.project + } + + pub fn action_log(&self) -> &Entity { + &self.action_log + } + + pub fn is_empty(&self) -> bool { + self.messages.is_empty() && self.title.is_none() + } + + pub fn model(&self) -> Option<&Arc> { + self.model.as_ref() + } + + pub fn set_model(&mut self, model: Arc, cx: &mut Context) { + let old_usage = self.latest_token_usage(); + self.model = Some(model); + let new_usage = self.latest_token_usage(); + if old_usage != new_usage { + cx.emit(TokenUsageUpdated(new_usage)); + } + cx.notify() + } + + pub fn summarization_model(&self) -> Option<&Arc> { + self.summarization_model.as_ref() + } + + pub fn set_summarization_model( + &mut self, + model: Option>, + cx: &mut Context, + ) { + self.summarization_model = model; + cx.notify() + } + + pub fn completion_mode(&self) -> CompletionMode { + self.completion_mode + } + + pub fn set_completion_mode(&mut self, mode: CompletionMode, cx: &mut Context) { + let old_usage = self.latest_token_usage(); self.completion_mode = mode; + let new_usage = self.latest_token_usage(); + if old_usage != new_usage { + cx.emit(TokenUsageUpdated(new_usage)); + } + cx.notify() } - pub fn messages(&self) -> &[AgentMessage] { - &self.messages + #[cfg(any(test, feature = "test-support"))] + pub fn last_message(&self) -> Option { + if let Some(message) = self.pending_message.clone() { + Some(Message::Agent(message)) + } else { + self.messages.last().cloned() + } + } + + pub fn add_default_tools(&mut self, cx: &mut Context) { + let language_registry = self.project.read(cx).languages().clone(); + self.add_tool(CopyPathTool::new(self.project.clone())); + self.add_tool(CreateDirectoryTool::new(self.project.clone())); + self.add_tool(DeletePathTool::new( + self.project.clone(), + self.action_log.clone(), + )); + self.add_tool(DiagnosticsTool::new(self.project.clone())); + self.add_tool(EditFileTool::new(cx.weak_entity(), language_registry)); + self.add_tool(FetchTool::new(self.project.read(cx).client().http_client())); + self.add_tool(FindPathTool::new(self.project.clone())); + self.add_tool(GrepTool::new(self.project.clone())); + self.add_tool(ListDirectoryTool::new(self.project.clone())); + self.add_tool(MovePathTool::new(self.project.clone())); + self.add_tool(NowTool); + self.add_tool(OpenTool::new(self.project.clone())); + self.add_tool(ReadFileTool::new( + self.project.clone(), + self.action_log.clone(), + )); + self.add_tool(TerminalTool::new(self.project.clone(), cx)); + self.add_tool(ThinkingTool); + self.add_tool(WebSearchTool); // TODO: Enable this only if it's a zed model. 
} pub fn add_tool(&mut self, tool: impl AgentTool) { @@ -165,173 +958,372 @@ impl Thread { self.tools.remove(name).is_some() } - pub fn cancel(&mut self) { - self.running_turn.take(); + pub fn profile(&self) -> &AgentProfileId { + &self.profile_id + } - let tool_results = self - .pending_tool_uses - .drain() - .map(|(tool_use_id, tool_use)| { - MessageContent::ToolResult(LanguageModelToolResult { - tool_use_id, - tool_name: tool_use.name.clone(), - is_error: true, - content: LanguageModelToolResultContent::Text("Tool canceled by user".into()), - output: None, - }) - }) - .collect::>(); - self.last_user_message().content.extend(tool_results); + pub fn set_profile(&mut self, profile_id: AgentProfileId) { + self.profile_id = profile_id; + } + + pub fn cancel(&mut self, cx: &mut Context) { + if let Some(running_turn) = self.running_turn.take() { + running_turn.cancel(); + } + self.flush_pending_message(cx); + } + + fn update_token_usage(&mut self, update: language_model::TokenUsage, cx: &mut Context) { + let Some(last_user_message) = self.last_user_message() else { + return; + }; + + self.request_token_usage + .insert(last_user_message.id.clone(), update); + cx.emit(TokenUsageUpdated(self.latest_token_usage())); + cx.notify(); + } + + pub fn truncate(&mut self, message_id: UserMessageId, cx: &mut Context) -> Result<()> { + self.cancel(cx); + let Some(position) = self.messages.iter().position( + |msg| matches!(msg, Message::User(UserMessage { id, .. }) if id == &message_id), + ) else { + return Err(anyhow!("Message not found")); + }; + + for message in self.messages.drain(position..) { + match message { + Message::User(message) => { + self.request_token_usage.remove(&message.id); + } + Message::Agent(_) | Message::Resume => {} + } + } + self.summary = None; + cx.notify(); + Ok(()) + } + + pub fn latest_token_usage(&self) -> Option { + let last_user_message = self.last_user_message()?; + let tokens = self.request_token_usage.get(&last_user_message.id)?; + let model = self.model.clone()?; + + Some(acp_thread::TokenUsage { + max_tokens: model.max_token_count_for_mode(self.completion_mode.into()), + used_tokens: tokens.total_tokens(), + }) + } + + pub fn resume( + &mut self, + cx: &mut Context, + ) -> Result>> { + anyhow::ensure!( + self.tool_use_limit_reached, + "can only resume after tool use limit is reached" + ); + + self.messages.push(Message::Resume); + cx.notify(); + + log::info!("Total messages in thread: {}", self.messages.len()); + self.run_turn(cx) + } /// Sending a message results in the model streaming a response, which could include tool calls. /// After calling tools, the model stops and waits for any outstanding tool calls to be completed and their results sent. /// The returned channel will report all the occurrences in which the model stops before erroring or ending its turn.
- pub fn send( + pub fn send( &mut self, - model: Arc, - content: impl Into, + id: UserMessageId, + content: impl IntoIterator, cx: &mut Context, - ) -> mpsc::UnboundedReceiver> { - let content = content.into(); + ) -> Result>> + where + T: Into, + { + let model = self.model().context("No language model configured")?; + log::info!("Thread::send called with model: {:?}", model.name()); + self.advance_prompt_id(); + + let content = content.into_iter().map(Into::into).collect::>(); log::debug!("Thread::send content: {:?}", content); + self.messages + .push(Message::User(UserMessage { id, content })); cx.notify(); - let (events_tx, events_rx) = - mpsc::unbounded::>(); - let event_stream = AgentResponseEventStream(events_tx); - let user_message_ix = self.messages.len(); - self.messages.push(AgentMessage { - role: Role::User, - content: vec![content], - }); log::info!("Total messages in thread: {}", self.messages.len()); - self.running_turn = Some(cx.spawn(async move |thread, cx| { - log::info!("Starting agent turn execution"); - let turn_result = async { - // Perform one request, then keep looping if the model makes tool calls. - let mut completion_intent = CompletionIntent::UserPrompt; - 'outer: loop { - log::debug!( - "Building completion request with intent: {:?}", - completion_intent - ); - let request = thread.update(cx, |thread, cx| { - thread.build_completion_request(completion_intent, cx) - })?; + self.run_turn(cx) + } - // println!( - // "request: {}", - // serde_json::to_string_pretty(&request).unwrap() - // ); - - // Stream events, appending to messages and collecting up tool uses. - log::info!("Calling model.stream_completion"); - let mut events = model.stream_completion(request, cx).await?; - log::debug!("Stream completion started successfully"); - let mut tool_uses = FuturesUnordered::new(); - while let Some(event) = events.next().await { - match event { - Ok(LanguageModelCompletionEvent::Stop(reason)) => { - event_stream.send_stop(reason); - if reason == StopReason::Refusal { - thread.update(cx, |thread, _cx| { - thread.messages.truncate(user_message_ix); - })?; - break 'outer; - } - } - Ok(event) => { - log::trace!("Received completion event: {:?}", event); - thread - .update(cx, |thread, cx| { - tool_uses.extend(thread.handle_streamed_completion_event( - event, - &event_stream, - cx, - )); - }) - .ok(); - } - Err(error) => { - log::error!("Error in completion stream: {:?}", error); - event_stream.send_error(error); - break; + fn run_turn( + &mut self, + cx: &mut Context, + ) -> Result>> { + self.cancel(cx); + + let model = self.model.clone().context("No language model configured")?; + let (events_tx, events_rx) = mpsc::unbounded::>(); + let event_stream = ThreadEventStream(events_tx); + let message_ix = self.messages.len().saturating_sub(1); + self.tool_use_limit_reached = false; + self.summary = None; + self.running_turn = Some(RunningTurn { + event_stream: event_stream.clone(), + _task: cx.spawn(async move |this, cx| { + log::info!("Starting agent turn execution"); + let mut update_title = None; + let turn_result: Result = async { + let mut completion_intent = CompletionIntent::UserPrompt; + loop { + log::debug!( + "Building completion request with intent: {:?}", + completion_intent + ); + let request = this.update(cx, |this, cx| { + this.build_completion_request(completion_intent, cx) + })??; + + log::info!("Calling model.stream_completion"); + + let mut tool_use_limit_reached = false; + let mut refused = false; + let mut reached_max_tokens = false; + let mut tool_uses = 
Self::stream_completion_with_retries( + this.clone(), + model.clone(), + request, + &event_stream, + &mut tool_use_limit_reached, + &mut refused, + &mut reached_max_tokens, + cx, + ) + .await?; + + if refused { + return Ok(StopReason::Refusal); + } else if reached_max_tokens { + return Ok(StopReason::MaxTokens); + } + + let end_turn = tool_uses.is_empty(); + while let Some(tool_result) = tool_uses.next().await { + log::info!("Tool finished {:?}", tool_result); + + event_stream.update_tool_call_fields( + &tool_result.tool_use_id, + acp::ToolCallUpdateFields { + status: Some(if tool_result.is_error { + acp::ToolCallStatus::Failed + } else { + acp::ToolCallStatus::Completed + }), + raw_output: tool_result.output.clone(), + ..Default::default() + }, + ); + this.update(cx, |this, _cx| { + this.pending_message() + .tool_results + .insert(tool_result.tool_use_id.clone(), tool_result); + })?; + } + + this.update(cx, |this, cx| { + if this.title.is_none() && update_title.is_none() { + update_title = Some(this.update_title(&event_stream, cx)); } + })?; + + if tool_use_limit_reached { + log::info!("Tool use limit reached, completing turn"); + this.update(cx, |this, _cx| this.tool_use_limit_reached = true)?; + return Err(language_model::ToolUseLimitReachedError.into()); + } else if end_turn { + log::info!("No tool uses found, completing turn"); + return Ok(StopReason::EndTurn); + } else { + this.update(cx, |this, cx| this.flush_pending_message(cx))?; + completion_intent = CompletionIntent::ToolResults; + } + } + } + .await; + _ = this.update(cx, |this, cx| this.flush_pending_message(cx)); + + match turn_result { + Ok(reason) => { + log::info!("Turn execution completed: {:?}", reason); + + if let Some(update_title) = update_title { + update_title.await.context("update title failed").log_err(); + } + + event_stream.send_stop(reason); + if reason == StopReason::Refusal { + _ = this.update(cx, |this, _| this.messages.truncate(message_ix)); } } + Err(error) => { + log::error!("Turn execution failed: {:?}", error); + event_stream.send_error(error); + } + } + + _ = this.update(cx, |this, _| this.running_turn.take()); + }), + }); + Ok(events_rx) + } - // If there are no tool uses, the turn is done. - if tool_uses.is_empty() { - log::info!("No tool uses found, completing turn"); - break; + async fn stream_completion_with_retries( + this: WeakEntity, + model: Arc, + request: LanguageModelRequest, + event_stream: &ThreadEventStream, + tool_use_limit_reached: &mut bool, + refusal: &mut bool, + max_tokens_reached: &mut bool, + cx: &mut AsyncApp, + ) -> Result>> { + log::debug!("Stream completion started successfully"); + + let mut attempt = None; + 'retry: loop { + telemetry::event!( + "Agent Thread Completion", + thread_id = this.read_with(cx, |this, _| this.id.to_string())?, + prompt_id = this.read_with(cx, |this, _| this.prompt_id.to_string())?, + model = model.telemetry_id(), + model_provider = model.provider_id().to_string(), + attempt + ); + + let mut events = model.stream_completion(request.clone(), cx).await?; + let mut tool_uses = FuturesUnordered::new(); + while let Some(event) = events.next().await { + match event { + Ok(LanguageModelCompletionEvent::StatusUpdate( + CompletionRequestStatus::ToolUseLimitReached, + )) => { + *tool_use_limit_reached = true; } - log::info!("Found {} tool uses to execute", tool_uses.len()); - - // As tool results trickle in, insert them in the last user - // message so that they can be sent on the next tick of the - // agentic loop. 
- while let Some(tool_result) = tool_uses.next().await { - log::info!("Tool finished {:?}", tool_result); - - event_stream.send_tool_call_update( - &tool_result.tool_use_id, - acp::ToolCallUpdateFields { - status: Some(if tool_result.is_error { - acp::ToolCallStatus::Failed - } else { - acp::ToolCallStatus::Completed - }), - ..Default::default() - }, + Ok(LanguageModelCompletionEvent::StatusUpdate( + CompletionRequestStatus::UsageUpdated { amount, limit }, + )) => { + this.update(cx, |this, cx| { + this.update_model_request_usage(amount, limit, cx) + })?; + } + Ok(LanguageModelCompletionEvent::UsageUpdate(usage)) => { + telemetry::event!( + "Agent Thread Completion Usage Updated", + thread_id = this.read_with(cx, |this, _| this.id.to_string())?, + prompt_id = this.read_with(cx, |this, _| this.prompt_id.to_string())?, + model = model.telemetry_id(), + model_provider = model.provider_id().to_string(), + attempt, + input_tokens = usage.input_tokens, + output_tokens = usage.output_tokens, + cache_creation_input_tokens = usage.cache_creation_input_tokens, + cache_read_input_tokens = usage.cache_read_input_tokens, ); - thread - .update(cx, |thread, _cx| { - thread.pending_tool_uses.remove(&tool_result.tool_use_id); - thread - .last_user_message() - .content - .push(MessageContent::ToolResult(tool_result)); - }) - .ok(); + + this.update(cx, |this, cx| this.update_token_usage(usage, cx))?; } + Ok(LanguageModelCompletionEvent::Stop(StopReason::Refusal)) => { + *refusal = true; + return Ok(FuturesUnordered::default()); + } + Ok(LanguageModelCompletionEvent::Stop(StopReason::MaxTokens)) => { + *max_tokens_reached = true; + return Ok(FuturesUnordered::default()); + } + Ok(LanguageModelCompletionEvent::Stop( + StopReason::ToolUse | StopReason::EndTurn, + )) => break, + Ok(event) => { + log::trace!("Received completion event: {:?}", event); + this.update(cx, |this, cx| { + tool_uses.extend(this.handle_streamed_completion_event( + event, + event_stream, + cx, + )); + })?; + } + Err(error) => { + let completion_mode = + this.read_with(cx, |thread, _cx| thread.completion_mode())?; + if completion_mode == CompletionMode::Normal { + return Err(error.into()); + } - completion_intent = CompletionIntent::ToolResults; - } + let Some(strategy) = Self::retry_strategy_for(&error) else { + return Err(error.into()); + }; - Ok(()) - } - .await; + let max_attempts = match &strategy { + RetryStrategy::ExponentialBackoff { max_attempts, .. } => *max_attempts, + RetryStrategy::Fixed { max_attempts, .. } => *max_attempts, + }; - if let Err(error) = turn_result { - log::error!("Turn execution failed: {:?}", error); - event_stream.send_error(error); - } else { - log::info!("Turn execution completed successfully"); + let attempt = attempt.get_or_insert(0u8); + + *attempt += 1; + + let attempt = *attempt; + if attempt > max_attempts { + return Err(error.into()); + } + + let delay = match &strategy { + RetryStrategy::ExponentialBackoff { initial_delay, .. } => { + let delay_secs = + initial_delay.as_secs() * 2u64.pow((attempt - 1) as u32); + Duration::from_secs(delay_secs) + } + RetryStrategy::Fixed { delay, .. 
} => *delay, + }; + log::debug!("Retry attempt {attempt} with delay {delay:?}"); + + event_stream.send_retry(acp_thread::RetryStatus { + last_error: error.to_string().into(), + attempt: attempt as usize, + max_attempts: max_attempts as usize, + started_at: Instant::now(), + duration: delay, + }); + + cx.background_executor().timer(delay).await; + continue 'retry; + } + } } - })); - events_rx - } - pub fn action_log(&self) -> &Entity { - &self.action_log + return Ok(tool_uses); + } } - pub fn build_system_message(&self) -> AgentMessage { + pub fn build_system_message(&self, cx: &App) -> LanguageModelRequestMessage { log::debug!("Building system message"); let prompt = SystemPromptTemplate { - project: &self.project_context.borrow(), + project: self.project_context.read(cx), available_tools: self.tools.keys().cloned().collect(), } .render(&self.templates) .context("failed to build system prompt") .expect("Invalid template"); log::debug!("System message built"); - AgentMessage { + LanguageModelRequestMessage { role: Role::System, content: vec![prompt.into()], + cache: true, } } @@ -341,7 +1333,7 @@ impl Thread { fn handle_streamed_completion_event( &mut self, event: LanguageModelCompletionEvent, - event_stream: &AgentResponseEventStream, + event_stream: &ThreadEventStream, cx: &mut Context, ) -> Option> { log::trace!("Handling streamed completion event: {:?}", event); @@ -349,10 +1341,8 @@ impl Thread { match event { StartMessage { .. } => { - self.messages.push(AgentMessage { - role: Role::Assistant, - content: Vec::new(), - }); + self.flush_pending_message(cx); + self.pending_message = Some(AgentMessage::default()); } Text(new_text) => self.handle_text_event(new_text, event_stream, cx), Thinking { text, signature } => { @@ -375,8 +1365,8 @@ impl Thread { json_parse_error, ))); } - UsageUpdate(_) | StatusUpdate(_) => {} - Stop(_) => unreachable!(), + StatusUpdate(_) => {} + UsageUpdate(_) | Stop(_) => unreachable!(), } None @@ -385,16 +1375,18 @@ impl Thread { fn handle_text_event( &mut self, new_text: String, - events_stream: &AgentResponseEventStream, + event_stream: &ThreadEventStream, cx: &mut Context, ) { - events_stream.send_text(&new_text); + event_stream.send_text(&new_text); - let last_message = self.last_assistant_message(); - if let Some(MessageContent::Text(text)) = last_message.content.last_mut() { + let last_message = self.pending_message(); + if let Some(AgentMessageContent::Text(text)) = last_message.content.last_mut() { text.push_str(&new_text); } else { - last_message.content.push(MessageContent::Text(new_text)); + last_message + .content + .push(AgentMessageContent::Text(new_text)); } cx.notify(); @@ -404,18 +1396,19 @@ impl Thread { &mut self, new_text: String, new_signature: Option, - event_stream: &AgentResponseEventStream, + event_stream: &ThreadEventStream, cx: &mut Context, ) { event_stream.send_thinking(&new_text); - let last_message = self.last_assistant_message(); - if let Some(MessageContent::Thinking { text, signature }) = last_message.content.last_mut() + let last_message = self.pending_message(); + if let Some(AgentMessageContent::Thinking { text, signature }) = + last_message.content.last_mut() { text.push_str(&new_text); *signature = new_signature.or(signature.take()); } else { - last_message.content.push(MessageContent::Thinking { + last_message.content.push(AgentMessageContent::Thinking { text: new_text, signature: new_signature, }); @@ -425,30 +1418,33 @@ impl Thread { } fn handle_redacted_thinking_event(&mut self, data: String, cx: &mut Context) { - let 
last_message = self.last_assistant_message(); + let last_message = self.pending_message(); last_message .content - .push(MessageContent::RedactedThinking(data)); + .push(AgentMessageContent::RedactedThinking(data)); cx.notify(); } fn handle_tool_use_event( &mut self, tool_use: LanguageModelToolUse, - event_stream: &AgentResponseEventStream, + event_stream: &ThreadEventStream, cx: &mut Context, ) -> Option> { cx.notify(); let tool = self.tools.get(tool_use.name.as_ref()).cloned(); - - self.pending_tool_uses - .insert(tool_use.id.clone(), tool_use.clone()); - let last_message = self.last_assistant_message(); + let mut title = SharedString::from(&tool_use.name); + let mut kind = acp::ToolKind::Other; + if let Some(tool) = tool.as_ref() { + title = tool.initial_title(tool_use.input.clone()); + kind = tool.kind(); + } // Ensure the last message ends in the current tool use - let push_new_tool_use = last_message.content.last_mut().map_or(true, |content| { - if let MessageContent::ToolUse(last_tool_use) = content { + let last_message = self.pending_message(); + let push_new_tool_use = last_message.content.last_mut().is_none_or(|content| { + if let AgentMessageContent::ToolUse(last_tool_use) = content { if last_tool_use.id == tool_use.id { *last_tool_use = tool_use.clone(); false @@ -461,14 +1457,16 @@ impl Thread { }); if push_new_tool_use { - event_stream.send_tool_call(tool.as_ref(), &tool_use); + event_stream.send_tool_call(&tool_use.id, title, kind, tool_use.input.clone()); last_message .content - .push(MessageContent::ToolUse(tool_use.clone())); + .push(AgentMessageContent::ToolUse(tool_use.clone())); } else { - event_stream.send_tool_call_update( + event_stream.update_tool_call_fields( &tool_use.id, acp::ToolCallUpdateFields { + title: Some(title.into()), + kind: Some(kind), raw_input: Some(tool_use.input.clone()), ..Default::default() }, @@ -490,15 +1488,35 @@ impl Thread { })); }; - let tool_result = self.run_tool(tool, tool_use.clone(), event_stream.clone(), cx); + let fs = self.project.read(cx).fs().clone(); + let tool_event_stream = + ToolCallEventStream::new(tool_use.id.clone(), event_stream.clone(), Some(fs)); + tool_event_stream.update_fields(acp::ToolCallUpdateFields { + status: Some(acp::ToolCallStatus::InProgress), + ..Default::default() + }); + let supports_images = self.model().is_some_and(|model| model.supports_images()); + let tool_result = tool.run(tool_use.input, tool_event_stream, cx); + log::info!("Running tool {}", tool_use.name); Some(cx.foreground_executor().spawn(async move { - match tool_result.await { - Ok(tool_output) => LanguageModelToolResult { + let tool_result = tool_result.await.and_then(|output| { + if let LanguageModelToolResultContent::Image(_) = &output.llm_output + && !supports_images + { + return Err(anyhow!( + "Attempted to read an image, but this model doesn't support it.", + )); + } + Ok(output) + }); + + match tool_result { + Ok(output) => LanguageModelToolResult { tool_use_id: tool_use.id, tool_name: tool_use.name, is_error: false, - content: LanguageModelToolResultContent::Text(Arc::from(tool_output)), - output: None, + content: output.llm_output, + output: Some(output.raw_output), }, Err(error) => LanguageModelToolResult { tool_use_id: tool_use.id, @@ -511,24 +1529,6 @@ impl Thread { })) } - fn run_tool( - &self, - tool: Arc, - tool_use: LanguageModelToolUse, - event_stream: AgentResponseEventStream, - cx: &mut Context, - ) -> Task> { - cx.spawn(async move |_this, cx| { - let tool_event_stream = ToolCallEventStream::new(tool_use.id, event_stream); 
- tool_event_stream.send_update(acp::ToolCallUpdateFields { - status: Some(acp::ToolCallStatus::InProgress), - ..Default::default() - }); - cx.update(|cx| tool.run(tool_use.input, tool_event_stream, cx))? - .await - }) - } - fn handle_tool_use_json_parse_error_event( &mut self, tool_use_id: LanguageModelToolUseId, @@ -546,126 +1546,459 @@ impl Thread { } } - /// Guarantees the last message is from the assistant and returns a mutable reference. - fn last_assistant_message(&mut self) -> &mut AgentMessage { - if self - .messages - .last() - .map_or(true, |m| m.role != Role::Assistant) - { - self.messages.push(AgentMessage { - role: Role::Assistant, - content: Vec::new(), - }); - } - self.messages.last_mut().unwrap() + fn update_model_request_usage(&self, amount: usize, limit: UsageLimit, cx: &mut Context) { + self.project + .read(cx) + .user_store() + .update(cx, |user_store, cx| { + user_store.update_model_request_usage( + ModelRequestUsage(RequestUsage { + amount: amount as i32, + limit, + }), + cx, + ) + }); + } + + pub fn title(&self) -> SharedString { + self.title.clone().unwrap_or("New Thread".into()) + } + + pub fn summary(&mut self, cx: &mut Context) -> Task> { + if let Some(summary) = self.summary.as_ref() { + return Task::ready(Ok(summary.clone())); + } + let Some(model) = self.summarization_model.clone() else { + return Task::ready(Err(anyhow!("No summarization model available"))); + }; + let mut request = LanguageModelRequest { + intent: Some(CompletionIntent::ThreadContextSummarization), + temperature: AgentSettings::temperature_for_model(&model, cx), + ..Default::default() + }; + + for message in &self.messages { + request.messages.extend(message.to_request()); + } + + request.messages.push(LanguageModelRequestMessage { + role: Role::User, + content: vec![SUMMARIZE_THREAD_DETAILED_PROMPT.into()], + cache: false, + }); + cx.spawn(async move |this, cx| { + let mut summary = String::new(); + let mut messages = model.stream_completion(request, cx).await?; + while let Some(event) = messages.next().await { + let event = event?; + let text = match event { + LanguageModelCompletionEvent::Text(text) => text, + LanguageModelCompletionEvent::StatusUpdate( + CompletionRequestStatus::UsageUpdated { amount, limit }, + ) => { + this.update(cx, |thread, cx| { + thread.update_model_request_usage(amount, limit, cx); + })?; + continue; + } + _ => continue, + }; + + let mut lines = text.lines(); + summary.extend(lines.next()); + } + + log::info!("Setting summary: {}", summary); + let summary = SharedString::from(summary); + + this.update(cx, |this, cx| { + this.summary = Some(summary.clone()); + cx.notify() + })?; + + Ok(summary) + }) + } + + fn update_title( + &mut self, + event_stream: &ThreadEventStream, + cx: &mut Context, + ) -> Task> { + log::info!( + "Generating title with model: {:?}", + self.summarization_model.as_ref().map(|model| model.name()) + ); + let Some(model) = self.summarization_model.clone() else { + return Task::ready(Ok(())); + }; + let event_stream = event_stream.clone(); + let mut request = LanguageModelRequest { + intent: Some(CompletionIntent::ThreadSummarization), + temperature: AgentSettings::temperature_for_model(&model, cx), + ..Default::default() + }; + + for message in &self.messages { + request.messages.extend(message.to_request()); + } + + request.messages.push(LanguageModelRequestMessage { + role: Role::User, + content: vec![SUMMARIZE_THREAD_PROMPT.into()], + cache: false, + }); + cx.spawn(async move |this, cx| { + let mut title = String::new(); + let mut messages 
= model.stream_completion(request, cx).await?; + while let Some(event) = messages.next().await { + let event = event?; + let text = match event { + LanguageModelCompletionEvent::Text(text) => text, + LanguageModelCompletionEvent::StatusUpdate( + CompletionRequestStatus::UsageUpdated { amount, limit }, + ) => { + this.update(cx, |thread, cx| { + thread.update_model_request_usage(amount, limit, cx); + })?; + continue; + } + _ => continue, + }; + + let mut lines = text.lines(); + title.extend(lines.next()); + + // Stop if the LLM generated multiple lines. + if lines.next().is_some() { + break; + } + } + + log::info!("Setting title: {}", title); + + this.update(cx, |this, cx| { + let title = SharedString::from(title); + event_stream.send_title_update(title.clone()); + this.title = Some(title); + cx.notify(); + }) + }) + } + + fn last_user_message(&self) -> Option<&UserMessage> { + self.messages + .iter() + .rev() + .find_map(|message| match message { + Message::User(user_message) => Some(user_message), + Message::Agent(_) => None, + Message::Resume => None, + }) } - /// Guarantees the last message is from the user and returns a mutable reference. - fn last_user_message(&mut self) -> &mut AgentMessage { - if self.messages.last().map_or(true, |m| m.role != Role::User) { - self.messages.push(AgentMessage { - role: Role::User, - content: Vec::new(), - }); + fn pending_message(&mut self) -> &mut AgentMessage { + self.pending_message.get_or_insert_default() + } + + fn flush_pending_message(&mut self, cx: &mut Context) { + let Some(mut message) = self.pending_message.take() else { + return; + }; + + for content in &message.content { + let AgentMessageContent::ToolUse(tool_use) = content else { + continue; + }; + + if !message.tool_results.contains_key(&tool_use.id) { + message.tool_results.insert( + tool_use.id.clone(), + LanguageModelToolResult { + tool_use_id: tool_use.id.clone(), + tool_name: tool_use.name.clone(), + is_error: true, + content: LanguageModelToolResultContent::Text(TOOL_CANCELED_MESSAGE.into()), + output: None, + }, + ); + } } - self.messages.last_mut().unwrap() + + self.messages.push(Message::Agent(message)); + self.updated_at = Utc::now(); + self.summary = None; + cx.notify() } - fn build_completion_request( + pub(crate) fn build_completion_request( &self, completion_intent: CompletionIntent, cx: &mut App, - ) -> LanguageModelRequest { + ) -> Result { + let model = self.model().context("No language model configured")?; + log::debug!("Building completion request"); log::debug!("Completion intent: {:?}", completion_intent); log::debug!("Completion mode: {:?}", self.completion_mode); - let messages = self.build_request_messages(); + let messages = self.build_request_messages(cx); log::info!("Request will include {} messages", messages.len()); - let tools: Vec = self - .tools - .values() - .filter_map(|tool| { - let tool_name = tool.name().to_string(); - log::trace!("Including tool: {}", tool_name); - Some(LanguageModelRequestTool { - name: tool_name, - description: tool.description(cx).to_string(), - input_schema: tool - .input_schema(self.selected_model.tool_input_format()) - .log_err()?, + let tools = if let Some(tools) = self.tools(cx).log_err() { + tools + .filter_map(|tool| { + let tool_name = tool.name().to_string(); + log::trace!("Including tool: {}", tool_name); + Some(LanguageModelRequestTool { + name: tool_name, + description: tool.description().to_string(), + input_schema: tool.input_schema(model.tool_input_format()).log_err()?, + }) }) - }) - .collect(); + .collect() + } 
else { + Vec::new() + }; log::info!("Request includes {} tools", tools.len()); let request = LanguageModelRequest { - thread_id: None, - prompt_id: None, + thread_id: Some(self.id.to_string()), + prompt_id: Some(self.prompt_id.to_string()), intent: Some(completion_intent), - mode: Some(self.completion_mode), + mode: Some(self.completion_mode.into()), messages, tools, tool_choice: None, stop: Vec::new(), - temperature: None, + temperature: AgentSettings::temperature_for_model(model, cx), thinking_allowed: true, }; log::debug!("Completion request built successfully"); - request + Ok(request) + } + + fn tools<'a>(&'a self, cx: &'a App) -> Result>> { + let model = self.model().context("No language model configured")?; + + let profile = AgentSettings::get_global(cx) + .profiles + .get(&self.profile_id) + .context("profile not found")?; + let provider_id = model.provider_id(); + + Ok(self + .tools + .iter() + .filter(move |(_, tool)| tool.supported_provider(&provider_id)) + .filter_map(|(tool_name, tool)| { + if profile.is_tool_enabled(tool_name) { + Some(tool) + } else { + None + } + }) + .chain(self.context_server_registry.read(cx).servers().flat_map( + |(server_id, tools)| { + tools.iter().filter_map(|(tool_name, tool)| { + if profile.is_context_server_tool_enabled(&server_id.0, tool_name) { + Some(tool) + } else { + None + } + }) + }, + ))) } - fn build_request_messages(&self) -> Vec { + fn build_request_messages(&self, cx: &App) -> Vec { log::trace!( "Building request messages from {} thread messages", self.messages.len() ); + let mut messages = vec![self.build_system_message(cx)]; + for message in &self.messages { + messages.extend(message.to_request()); + } + + if let Some(message) = self.pending_message.as_ref() { + messages.extend(message.to_request()); + } + + if let Some(last_user_message) = messages + .iter_mut() + .rev() + .find(|message| message.role == Role::User) + { + last_user_message.cache = true; + } - let messages = Some(self.build_system_message()) - .iter() - .chain(self.messages.iter()) - .map(|message| { - log::trace!( - " - {} message with {} content items", - match message.role { - Role::System => "System", - Role::User => "User", - Role::Assistant => "Assistant", - }, - message.content.len() - ); - LanguageModelRequestMessage { - role: message.role, - content: message.content.clone(), - cache: false, - } - }) - .collect(); messages } pub fn to_markdown(&self) -> String { let mut markdown = String::new(); - for message in &self.messages { + for (ix, message) in self.messages.iter().enumerate() { + if ix > 0 { + markdown.push('\n'); + } markdown.push_str(&message.to_markdown()); } + + if let Some(message) = self.pending_message.as_ref() { + markdown.push('\n'); + markdown.push_str(&message.to_markdown()); + } + markdown } + + fn advance_prompt_id(&mut self) { + self.prompt_id = PromptId::new(); + } + + fn retry_strategy_for(error: &LanguageModelCompletionError) -> Option { + use LanguageModelCompletionError::*; + use http_client::StatusCode; + + // General strategy here: + // - If retrying won't help (e.g. invalid API key or payload too large), return None so we don't retry at all. + // - If it's a time-based issue (e.g. server overloaded, rate limit exceeded), retry up to 4 times with exponential backoff. + // - If it's an issue that *might* be fixed by retrying (e.g. internal server error), retry up to 3 times. + match error { + HttpResponseError { + status_code: StatusCode::TOO_MANY_REQUESTS, + .. 
+ } => Some(RetryStrategy::ExponentialBackoff { + initial_delay: BASE_RETRY_DELAY, + max_attempts: MAX_RETRY_ATTEMPTS, + }), + ServerOverloaded { retry_after, .. } | RateLimitExceeded { retry_after, .. } => { + Some(RetryStrategy::Fixed { + delay: retry_after.unwrap_or(BASE_RETRY_DELAY), + max_attempts: MAX_RETRY_ATTEMPTS, + }) + } + UpstreamProviderError { + status, + retry_after, + .. + } => match *status { + StatusCode::TOO_MANY_REQUESTS | StatusCode::SERVICE_UNAVAILABLE => { + Some(RetryStrategy::Fixed { + delay: retry_after.unwrap_or(BASE_RETRY_DELAY), + max_attempts: MAX_RETRY_ATTEMPTS, + }) + } + StatusCode::INTERNAL_SERVER_ERROR => Some(RetryStrategy::Fixed { + delay: retry_after.unwrap_or(BASE_RETRY_DELAY), + // Internal Server Error could be anything; retry up to 3 times. + max_attempts: 3, + }), + status => { + // There is no StatusCode variant for the unofficial HTTP 529 ("The service is overloaded"), + // but we see these responses frequently in practice. See https://http.dev/529 + if status.as_u16() == 529 { + Some(RetryStrategy::Fixed { + delay: retry_after.unwrap_or(BASE_RETRY_DELAY), + max_attempts: MAX_RETRY_ATTEMPTS, + }) + } else { + Some(RetryStrategy::Fixed { + delay: retry_after.unwrap_or(BASE_RETRY_DELAY), + max_attempts: 2, + }) + } + } + }, + ApiInternalServerError { .. } => Some(RetryStrategy::Fixed { + delay: BASE_RETRY_DELAY, + max_attempts: 3, + }), + ApiReadResponseError { .. } + | HttpSend { .. } + | DeserializeResponse { .. } + | BadRequestFormat { .. } => Some(RetryStrategy::Fixed { + delay: BASE_RETRY_DELAY, + max_attempts: 3, + }), + // Retrying these errors definitely shouldn't help. + HttpResponseError { + status_code: + StatusCode::PAYLOAD_TOO_LARGE | StatusCode::FORBIDDEN | StatusCode::UNAUTHORIZED, + .. + } + | AuthenticationError { .. } + | PermissionError { .. } + | NoApiKey { .. } + | ApiEndpointNotFound { .. } + | PromptTooLarge { .. } => None, + // These errors might be transient, so retry them once. + SerializeRequest { .. } | BuildRequestBody { .. } => Some(RetryStrategy::Fixed { + delay: BASE_RETRY_DELAY, + max_attempts: 1, + }), + // Retry any other 4xx and 5xx errors up to 3 times. + HttpResponseError { status_code, .. } + if status_code.is_client_error() || status_code.is_server_error() => + { + Some(RetryStrategy::Fixed { + delay: BASE_RETRY_DELAY, + max_attempts: 3, + }) + } + Other(err) + if err.is::() + || err.is::() => + { + // Retrying won't help for Payment Required or Model Request Limit errors (where + // the user must upgrade to usage-based billing to get more requests, or else wait + // for a significant amount of time for the request limit to reset). + None + } + // For any other errors, conservatively retry a couple of times at most. + HttpResponseError { .. } | Other(..) => Some(RetryStrategy::Fixed { + delay: BASE_RETRY_DELAY, + max_attempts: 2, + }), + } + } +} + +struct RunningTurn { + /// Holds the task that handles agent interaction until the end of the turn. + /// Survives across multiple requests as the model performs tool calls and + /// we run tools and report their results.
+ event_stream: ThreadEventStream, +} + +impl RunningTurn { + fn cancel(self) { + log::debug!("Cancelling in progress turn"); + self.event_stream.send_canceled(); + } } +pub struct TokenUsageUpdated(pub Option); + +impl EventEmitter for Thread {} + pub trait AgentTool where Self: 'static + Sized, { type Input: for<'de> Deserialize<'de> + Serialize + JsonSchema; + type Output: for<'de> Deserialize<'de> + Serialize + Into; fn name(&self) -> SharedString; - fn description(&self, _cx: &mut App) -> SharedString { + fn description(&self) -> SharedString { let schema = schemars::schema_for!(Self::Input); SharedString::new( schema @@ -678,21 +2011,17 @@ where fn kind(&self) -> acp::ToolKind; /// The initial tool title to display. Can be updated during the tool run. - fn initial_title(&self, input: Self::Input) -> SharedString; + fn initial_title(&self, input: Result) -> SharedString; /// Returns the JSON schema that describes the tool's input. - fn input_schema(&self) -> Schema { - schemars::schema_for!(Self::Input) + fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Schema { + crate::tool_schema::root_schema_for::(format) } - /// Allows the tool to authorize a given tool call with the user if necessary - fn authorize( - &self, - input: Self::Input, - event_stream: ToolCallEventStream, - ) -> impl use + Future> { - let json_input = serde_json::json!(&input); - event_stream.authorize(self.initial_title(input).into(), self.kind(), json_input) + /// Some tools rely on a provider for the underlying billing or other reasons. + /// Allow the tool to check if they are compatible, or should be filtered out. + fn supported_provider(&self, _provider: &LanguageModelProviderId) -> bool { + true } /// Runs the tool with the provided input. @@ -701,7 +2030,18 @@ where input: Self::Input, event_stream: ToolCallEventStream, cx: &mut App, - ) -> Task>; + ) -> Task>; + + /// Emits events for a previous execution of the tool. 
+ fn replay( + &self, + _input: Self::Input, + _output: Self::Output, + _event_stream: ToolCallEventStream, + _cx: &mut App, + ) -> Result<()> { + Ok(()) + } fn erase(self) -> Arc { Arc::new(Erased(Arc::new(self))) @@ -710,18 +2050,33 @@ where pub struct Erased(T); +pub struct AgentToolOutput { + pub llm_output: LanguageModelToolResultContent, + pub raw_output: serde_json::Value, +} + pub trait AnyAgentTool { fn name(&self) -> SharedString; - fn description(&self, cx: &mut App) -> SharedString; + fn description(&self) -> SharedString; fn kind(&self) -> acp::ToolKind; - fn initial_title(&self, input: serde_json::Value) -> Result; + fn initial_title(&self, input: serde_json::Value) -> SharedString; fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result; + fn supported_provider(&self, _provider: &LanguageModelProviderId) -> bool { + true + } fn run( self: Arc, input: serde_json::Value, event_stream: ToolCallEventStream, cx: &mut App, - ) -> Task>; + ) -> Task>; + fn replay( + &self, + input: serde_json::Value, + output: serde_json::Value, + event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Result<()>; } impl AnyAgentTool for Erased> @@ -732,111 +2087,102 @@ where self.0.name() } - fn description(&self, cx: &mut App) -> SharedString { - self.0.description(cx) + fn description(&self) -> SharedString { + self.0.description() } fn kind(&self) -> agent_client_protocol::ToolKind { self.0.kind() } - fn initial_title(&self, input: serde_json::Value) -> Result { - let parsed_input = serde_json::from_value(input)?; - Ok(self.0.initial_title(parsed_input)) + fn initial_title(&self, input: serde_json::Value) -> SharedString { + let parsed_input = serde_json::from_value(input.clone()).map_err(|_| input); + self.0.initial_title(parsed_input) } fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result { - let mut json = serde_json::to_value(self.0.input_schema())?; + let mut json = serde_json::to_value(self.0.input_schema(format))?; adapt_schema_to_format(&mut json, format)?; Ok(json) } + fn supported_provider(&self, provider: &LanguageModelProviderId) -> bool { + self.0.supported_provider(provider) + } + fn run( self: Arc, input: serde_json::Value, event_stream: ToolCallEventStream, cx: &mut App, - ) -> Task> { - let parsed_input: Result = serde_json::from_value(input).map_err(Into::into); - match parsed_input { - Ok(input) => self.0.clone().run(input, event_stream, cx), - Err(error) => Task::ready(Err(anyhow!(error))), - } + ) -> Task> { + cx.spawn(async move |cx| { + let input = serde_json::from_value(input)?; + let output = cx + .update(|cx| self.0.clone().run(input, event_stream, cx))? 
+ .await?; + let raw_output = serde_json::to_value(&output)?; + Ok(AgentToolOutput { + llm_output: output.into(), + raw_output, + }) + }) + } + + fn replay( + &self, + input: serde_json::Value, + output: serde_json::Value, + event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Result<()> { + let input = serde_json::from_value(input)?; + let output = serde_json::from_value(output)?; + self.0.replay(input, output, event_stream, cx) } } #[derive(Clone)] -struct AgentResponseEventStream( - mpsc::UnboundedSender>, -); +struct ThreadEventStream(mpsc::UnboundedSender>); -impl AgentResponseEventStream { - fn send_text(&self, text: &str) { +impl ThreadEventStream { + fn send_title_update(&self, text: SharedString) { self.0 - .unbounded_send(Ok(AgentResponseEvent::Text(text.to_string()))) + .unbounded_send(Ok(ThreadEvent::TitleUpdate(text))) .ok(); } - fn send_thinking(&self, text: &str) { + fn send_user_message(&self, message: &UserMessage) { self.0 - .unbounded_send(Ok(AgentResponseEvent::Thinking(text.to_string()))) + .unbounded_send(Ok(ThreadEvent::UserMessage(message.clone()))) .ok(); } - fn authorize_tool_call( - &self, - id: &LanguageModelToolUseId, - title: String, - kind: acp::ToolKind, - input: serde_json::Value, - ) -> impl use<> + Future> { - let (response_tx, response_rx) = oneshot::channel(); + fn send_text(&self, text: &str) { self.0 - .unbounded_send(Ok(AgentResponseEvent::ToolCallAuthorization( - ToolCallAuthorization { - tool_call: Self::initial_tool_call(id, title, kind, input), - options: vec![ - acp::PermissionOption { - id: acp::PermissionOptionId("always_allow".into()), - name: "Always Allow".into(), - kind: acp::PermissionOptionKind::AllowAlways, - }, - acp::PermissionOption { - id: acp::PermissionOptionId("allow".into()), - name: "Allow".into(), - kind: acp::PermissionOptionKind::AllowOnce, - }, - acp::PermissionOption { - id: acp::PermissionOptionId("deny".into()), - name: "Deny".into(), - kind: acp::PermissionOptionKind::RejectOnce, - }, - ], - response: response_tx, - }, - ))) + .unbounded_send(Ok(ThreadEvent::AgentText(text.to_string()))) + .ok(); + } + + fn send_thinking(&self, text: &str) { + self.0 + .unbounded_send(Ok(ThreadEvent::AgentThinking(text.to_string()))) .ok(); - async move { - match response_rx.await?.0.as_ref() { - "allow" | "always_allow" => Ok(()), - _ => Err(anyhow!("Permission to run tool denied by user")), - } - } } fn send_tool_call( &self, - tool: Option<&Arc>, - tool_use: &LanguageModelToolUse, + id: &LanguageModelToolUseId, + title: SharedString, + kind: acp::ToolKind, + input: serde_json::Value, ) { self.0 - .unbounded_send(Ok(AgentResponseEvent::ToolCall(Self::initial_tool_call( - &tool_use.id, - tool.and_then(|t| t.initial_title(tool_use.input.clone()).ok()) - .map(|i| i.into()) - .unwrap_or_else(|| tool_use.name.to_string()), - tool.map(|t| t.kind()).unwrap_or(acp::ToolKind::Other), - tool_use.input.clone(), + .unbounded_send(Ok(ThreadEvent::ToolCall(Self::initial_tool_call( + id, + title.to_string(), + kind, + input, )))) .ok(); } @@ -859,97 +2205,306 @@ impl AgentResponseEventStream { } } - fn send_tool_call_update( + fn update_tool_call_fields( &self, tool_use_id: &LanguageModelToolUseId, fields: acp::ToolCallUpdateFields, ) { self.0 - .unbounded_send(Ok(AgentResponseEvent::ToolCallUpdate( + .unbounded_send(Ok(ThreadEvent::ToolCallUpdate( acp::ToolCallUpdate { id: acp::ToolCallId(tool_use_id.to_string().into()), fields, - }, + } + .into(), ))) .ok(); } + fn send_retry(&self, status: acp_thread::RetryStatus) { + 
self.0.unbounded_send(Ok(ThreadEvent::Retry(status))).ok(); + } + fn send_stop(&self, reason: StopReason) { match reason { StopReason::EndTurn => { self.0 - .unbounded_send(Ok(AgentResponseEvent::Stop(acp::StopReason::EndTurn))) + .unbounded_send(Ok(ThreadEvent::Stop(acp::StopReason::EndTurn))) .ok(); } StopReason::MaxTokens => { self.0 - .unbounded_send(Ok(AgentResponseEvent::Stop(acp::StopReason::MaxTokens))) + .unbounded_send(Ok(ThreadEvent::Stop(acp::StopReason::MaxTokens))) .ok(); } StopReason::Refusal => { self.0 - .unbounded_send(Ok(AgentResponseEvent::Stop(acp::StopReason::Refusal))) + .unbounded_send(Ok(ThreadEvent::Stop(acp::StopReason::Refusal))) .ok(); } StopReason::ToolUse => {} } } - fn send_error(&self, error: LanguageModelCompletionError) { - self.0.unbounded_send(Err(error)).ok(); + fn send_canceled(&self) { + self.0 + .unbounded_send(Ok(ThreadEvent::Stop(acp::StopReason::Cancelled))) + .ok(); + } + + fn send_error(&self, error: impl Into) { + self.0.unbounded_send(Err(error.into())).ok(); } } #[derive(Clone)] pub struct ToolCallEventStream { tool_use_id: LanguageModelToolUseId, - stream: AgentResponseEventStream, + stream: ThreadEventStream, + fs: Option>, } impl ToolCallEventStream { - fn new(tool_use_id: LanguageModelToolUseId, stream: AgentResponseEventStream) -> Self { + #[cfg(test)] + pub fn test() -> (Self, ToolCallEventStreamReceiver) { + let (events_tx, events_rx) = mpsc::unbounded::>(); + + let stream = ToolCallEventStream::new("test_id".into(), ThreadEventStream(events_tx), None); + + (stream, ToolCallEventStreamReceiver(events_rx)) + } + + fn new( + tool_use_id: LanguageModelToolUseId, + stream: ThreadEventStream, + fs: Option>, + ) -> Self { Self { tool_use_id, stream, + fs, } } - pub fn send_update(&self, fields: acp::ToolCallUpdateFields) { - self.stream.send_tool_call_update(&self.tool_use_id, fields); + pub fn update_fields(&self, fields: acp::ToolCallUpdateFields) { + self.stream + .update_tool_call_fields(&self.tool_use_id, fields); } - pub fn authorize( - &self, - title: String, - kind: acp::ToolKind, - input: serde_json::Value, - ) -> impl use<> + Future> { + pub fn update_diff(&self, diff: Entity) { + self.stream + .0 + .unbounded_send(Ok(ThreadEvent::ToolCallUpdate( + acp_thread::ToolCallUpdateDiff { + id: acp::ToolCallId(self.tool_use_id.to_string().into()), + diff, + } + .into(), + ))) + .ok(); + } + + pub fn update_terminal(&self, terminal: Entity) { self.stream - .authorize_tool_call(&self.tool_use_id, title, kind, input) + .0 + .unbounded_send(Ok(ThreadEvent::ToolCallUpdate( + acp_thread::ToolCallUpdateTerminal { + id: acp::ToolCallId(self.tool_use_id.to_string().into()), + terminal, + } + .into(), + ))) + .ok(); + } + + pub fn authorize(&self, title: impl Into, cx: &mut App) -> Task> { + if agent_settings::AgentSettings::get_global(cx).always_allow_tool_actions { + return Task::ready(Ok(())); + } + + let (response_tx, response_rx) = oneshot::channel(); + self.stream + .0 + .unbounded_send(Ok(ThreadEvent::ToolCallAuthorization( + ToolCallAuthorization { + tool_call: acp::ToolCallUpdate { + id: acp::ToolCallId(self.tool_use_id.to_string().into()), + fields: acp::ToolCallUpdateFields { + title: Some(title.into()), + ..Default::default() + }, + }, + options: vec![ + acp::PermissionOption { + id: acp::PermissionOptionId("always_allow".into()), + name: "Always Allow".into(), + kind: acp::PermissionOptionKind::AllowAlways, + }, + acp::PermissionOption { + id: acp::PermissionOptionId("allow".into()), + name: "Allow".into(), + kind: 
acp::PermissionOptionKind::AllowOnce, + }, + acp::PermissionOption { + id: acp::PermissionOptionId("deny".into()), + name: "Deny".into(), + kind: acp::PermissionOptionKind::RejectOnce, + }, + ], + response: response_tx, + }, + ))) + .ok(); + let fs = self.fs.clone(); + cx.spawn(async move |cx| match response_rx.await?.0.as_ref() { + "always_allow" => { + if let Some(fs) = fs.clone() { + cx.update(|cx| { + update_settings_file::(fs, cx, |settings, _| { + settings.set_always_allow_tool_actions(true); + }); + })?; + } + + Ok(()) + } + "allow" => Ok(()), + _ => Err(anyhow!("Permission to run tool denied by user")), + }) + } +} + +#[cfg(test)] +pub struct ToolCallEventStreamReceiver(mpsc::UnboundedReceiver>); + +#[cfg(test)] +impl ToolCallEventStreamReceiver { + pub async fn expect_authorization(&mut self) -> ToolCallAuthorization { + let event = self.0.next().await; + if let Some(Ok(ThreadEvent::ToolCallAuthorization(auth))) = event { + auth + } else { + panic!("Expected ToolCallAuthorization but got: {:?}", event); + } + } + + pub async fn expect_terminal(&mut self) -> Entity { + let event = self.0.next().await; + if let Some(Ok(ThreadEvent::ToolCallUpdate(acp_thread::ToolCallUpdate::UpdateTerminal( + update, + )))) = event + { + update.terminal + } else { + panic!("Expected terminal but got: {:?}", event); + } } } #[cfg(test)] -pub struct TestToolCallEventStream { - stream: ToolCallEventStream, - _events_rx: mpsc::UnboundedReceiver>, +impl std::ops::Deref for ToolCallEventStreamReceiver { + type Target = mpsc::UnboundedReceiver>; + + fn deref(&self) -> &Self::Target { + &self.0 + } } #[cfg(test)] -impl TestToolCallEventStream { - pub fn new() -> Self { - let (events_tx, events_rx) = - mpsc::unbounded::>(); +impl std::ops::DerefMut for ToolCallEventStreamReceiver { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} - let stream = ToolCallEventStream::new("test".into(), AgentResponseEventStream(events_tx)); +impl From<&str> for UserMessageContent { + fn from(text: &str) -> Self { + Self::Text(text.into()) + } +} - Self { - stream, - _events_rx: events_rx, +impl From for UserMessageContent { + fn from(value: acp::ContentBlock) -> Self { + match value { + acp::ContentBlock::Text(text_content) => Self::Text(text_content.text), + acp::ContentBlock::Image(image_content) => Self::Image(convert_image(image_content)), + acp::ContentBlock::Audio(_) => { + // TODO + Self::Text("[audio]".to_string()) + } + acp::ContentBlock::ResourceLink(resource_link) => { + match MentionUri::parse(&resource_link.uri) { + Ok(uri) => Self::Mention { + uri, + content: String::new(), + }, + Err(err) => { + log::error!("Failed to parse mention link: {}", err); + Self::Text(format!("[{}]({})", resource_link.name, resource_link.uri)) + } + } + } + acp::ContentBlock::Resource(resource) => match resource.resource { + acp::EmbeddedResourceResource::TextResourceContents(resource) => { + match MentionUri::parse(&resource.uri) { + Ok(uri) => Self::Mention { + uri, + content: resource.text, + }, + Err(err) => { + log::error!("Failed to parse mention link: {}", err); + Self::Text( + MarkdownCodeBlock { + tag: &resource.uri, + text: &resource.text, + } + .to_string(), + ) + } + } + } + acp::EmbeddedResourceResource::BlobResourceContents(_) => { + // TODO + Self::Text("[blob]".to_string()) + } + }, } } +} + +impl From for acp::ContentBlock { + fn from(content: UserMessageContent) -> Self { + match content { + UserMessageContent::Text(text) => acp::ContentBlock::Text(acp::TextContent { + text, + annotations: None, + 
            }),
+        UserMessageContent::Image(image) => acp::ContentBlock::Image(acp::ImageContent {
+            data: image.source.to_string(),
+            mime_type: "image/png".to_string(),
+            annotations: None,
+            uri: None,
+        }),
+        UserMessageContent::Mention { uri, content } => {
+            acp::ContentBlock::Resource(acp::EmbeddedResource {
+                resource: acp::EmbeddedResourceResource::TextResourceContents(
+                    acp::TextResourceContents {
+                        mime_type: None,
+                        text: content,
+                        uri: uri.to_uri().to_string(),
+                    },
+                ),
+                annotations: None,
+            })
+        }
+    }
+}
+}
-    pub fn stream(&self) -> ToolCallEventStream {
-        self.stream.clone()
+fn convert_image(image_content: acp::ImageContent) -> LanguageModelImage {
+    LanguageModelImage {
+        source: image_content.data.into(),
+        // TODO: make this optional?
+        size: gpui::Size::new(0.into(), 0.into()),
     }
 }
diff --git a/crates/agent2/src/tool_schema.rs b/crates/agent2/src/tool_schema.rs
new file mode 100644
index 0000000000000000000000000000000000000000..f608336b416a72885e52abba58ef472029421e4f
--- /dev/null
+++ b/crates/agent2/src/tool_schema.rs
@@ -0,0 +1,43 @@
+use language_model::LanguageModelToolSchemaFormat;
+use schemars::{
+    JsonSchema, Schema,
+    generate::SchemaSettings,
+    transform::{Transform, transform_subschemas},
+};
+
+pub(crate) fn root_schema_for<T: JsonSchema>(format: LanguageModelToolSchemaFormat) -> Schema {
+    let mut generator = match format {
+        LanguageModelToolSchemaFormat::JsonSchema => SchemaSettings::draft07().into_generator(),
+        LanguageModelToolSchemaFormat::JsonSchemaSubset => SchemaSettings::openapi3()
+            .with(|settings| {
+                settings.meta_schema = None;
+                settings.inline_subschemas = true;
+            })
+            .with_transform(ToJsonSchemaSubsetTransform)
+            .into_generator(),
+    };
+    generator.root_schema_for::<T>()
+}
+
+#[derive(Debug, Clone)]
+struct ToJsonSchemaSubsetTransform;
+
+impl Transform for ToJsonSchemaSubsetTransform {
+    fn transform(&mut self, schema: &mut Schema) {
+        // Ensure that the type field is not an array; this happens when we use
+        // Option<T>, where the type will be [T, "null"].
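+        // For example, a field declared as Option<String> generates
+        // `{"type": ["string", "null"]}`; the code below collapses that to
+        // `{"type": "string"}`, and any `oneOf` is rewritten to the supported
+        // `anyOf` keyword.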
+ if let Some(type_field) = schema.get_mut("type") + && let Some(types) = type_field.as_array() + && let Some(first_type) = types.first() + { + *type_field = first_type.clone(); + } + + // oneOf is not supported, use anyOf instead + if let Some(one_of) = schema.remove("oneOf") { + schema.insert("anyOf".to_string(), one_of); + } + + transform_subschemas(self, schema); + } +} diff --git a/crates/agent2/src/tools.rs b/crates/agent2/src/tools.rs index 240614c263b4017c2f2aa2353370479376e0a415..d1f2b3b1c7ad3ed7ade2324c61c1e72d7e7e4006 100644 --- a/crates/agent2/src/tools.rs +++ b/crates/agent2/src/tools.rs @@ -1,7 +1,35 @@ +mod context_server_registry; +mod copy_path_tool; +mod create_directory_tool; +mod delete_path_tool; +mod diagnostics_tool; +mod edit_file_tool; +mod fetch_tool; mod find_path_tool; +mod grep_tool; +mod list_directory_tool; +mod move_path_tool; +mod now_tool; +mod open_tool; mod read_file_tool; +mod terminal_tool; mod thinking_tool; +mod web_search_tool; +pub use context_server_registry::*; +pub use copy_path_tool::*; +pub use create_directory_tool::*; +pub use delete_path_tool::*; +pub use diagnostics_tool::*; +pub use edit_file_tool::*; +pub use fetch_tool::*; pub use find_path_tool::*; +pub use grep_tool::*; +pub use list_directory_tool::*; +pub use move_path_tool::*; +pub use now_tool::*; +pub use open_tool::*; pub use read_file_tool::*; +pub use terminal_tool::*; pub use thinking_tool::*; +pub use web_search_tool::*; diff --git a/crates/agent2/src/tools/context_server_registry.rs b/crates/agent2/src/tools/context_server_registry.rs new file mode 100644 index 0000000000000000000000000000000000000000..c7963fa6e6e14ffa34d076dc2ca5dbdc23c78cab --- /dev/null +++ b/crates/agent2/src/tools/context_server_registry.rs @@ -0,0 +1,239 @@ +use crate::{AgentToolOutput, AnyAgentTool, ToolCallEventStream}; +use agent_client_protocol::ToolKind; +use anyhow::{Result, anyhow, bail}; +use collections::{BTreeMap, HashMap}; +use context_server::ContextServerId; +use gpui::{App, Context, Entity, SharedString, Task}; +use project::context_server_store::{ContextServerStatus, ContextServerStore}; +use std::sync::Arc; +use util::ResultExt; + +pub struct ContextServerRegistry { + server_store: Entity, + registered_servers: HashMap, + _subscription: gpui::Subscription, +} + +struct RegisteredContextServer { + tools: BTreeMap>, + load_tools: Task>, +} + +impl ContextServerRegistry { + pub fn new(server_store: Entity, cx: &mut Context) -> Self { + let mut this = Self { + server_store: server_store.clone(), + registered_servers: HashMap::default(), + _subscription: cx.subscribe(&server_store, Self::handle_context_server_store_event), + }; + for server in server_store.read(cx).running_servers() { + this.reload_tools_for_server(server.id(), cx); + } + this + } + + pub fn servers( + &self, + ) -> impl Iterator< + Item = ( + &ContextServerId, + &BTreeMap>, + ), + > { + self.registered_servers + .iter() + .map(|(id, server)| (id, &server.tools)) + } + + fn reload_tools_for_server(&mut self, server_id: ContextServerId, cx: &mut Context) { + let Some(server) = self.server_store.read(cx).get_running_server(&server_id) else { + return; + }; + let Some(client) = server.client() else { + return; + }; + if !client.capable(context_server::protocol::ServerCapability::Tools) { + return; + } + + let registered_server = + self.registered_servers + .entry(server_id.clone()) + .or_insert(RegisteredContextServer { + tools: BTreeMap::default(), + load_tools: Task::ready(Ok(())), + }); + registered_server.load_tools = 
cx.spawn(async move |this, cx| { + let response = client + .request::(()) + .await; + + this.update(cx, |this, cx| { + let Some(registered_server) = this.registered_servers.get_mut(&server_id) else { + return; + }; + + registered_server.tools.clear(); + if let Some(response) = response.log_err() { + for tool in response.tools { + let tool = Arc::new(ContextServerTool::new( + this.server_store.clone(), + server.id(), + tool, + )); + registered_server.tools.insert(tool.name(), tool); + } + cx.notify(); + } + }) + }); + } + + fn handle_context_server_store_event( + &mut self, + _: Entity, + event: &project::context_server_store::Event, + cx: &mut Context, + ) { + match event { + project::context_server_store::Event::ServerStatusChanged { server_id, status } => { + match status { + ContextServerStatus::Starting => {} + ContextServerStatus::Running => { + self.reload_tools_for_server(server_id.clone(), cx); + } + ContextServerStatus::Stopped | ContextServerStatus::Error(_) => { + self.registered_servers.remove(server_id); + cx.notify(); + } + } + } + } + } +} + +struct ContextServerTool { + store: Entity, + server_id: ContextServerId, + tool: context_server::types::Tool, +} + +impl ContextServerTool { + fn new( + store: Entity, + server_id: ContextServerId, + tool: context_server::types::Tool, + ) -> Self { + Self { + store, + server_id, + tool, + } + } +} + +impl AnyAgentTool for ContextServerTool { + fn name(&self) -> SharedString { + self.tool.name.clone().into() + } + + fn description(&self) -> SharedString { + self.tool.description.clone().unwrap_or_default().into() + } + + fn kind(&self) -> ToolKind { + ToolKind::Other + } + + fn initial_title(&self, _input: serde_json::Value) -> SharedString { + format!("Run MCP tool `{}`", self.tool.name).into() + } + + fn input_schema( + &self, + format: language_model::LanguageModelToolSchemaFormat, + ) -> Result { + let mut schema = self.tool.input_schema.clone(); + assistant_tool::adapt_schema_to_format(&mut schema, format)?; + Ok(match schema { + serde_json::Value::Null => { + serde_json::json!({ "type": "object", "properties": [] }) + } + serde_json::Value::Object(map) if map.is_empty() => { + serde_json::json!({ "type": "object", "properties": [] }) + } + _ => schema, + }) + } + + fn run( + self: Arc, + input: serde_json::Value, + _event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + let Some(server) = self.store.read(cx).get_running_server(&self.server_id) else { + return Task::ready(Err(anyhow!("Context server not found"))); + }; + let tool_name = self.tool.name.clone(); + + cx.spawn(async move |_cx| { + let Some(protocol) = server.client() else { + bail!("Context server not initialized"); + }; + + let arguments = if let serde_json::Value::Object(map) = input { + Some(map.into_iter().collect()) + } else { + None + }; + + log::trace!( + "Running tool: {} with arguments: {:?}", + tool_name, + arguments + ); + let response = protocol + .request::( + context_server::types::CallToolParams { + name: tool_name, + arguments, + meta: None, + }, + ) + .await?; + + let mut result = String::new(); + for content in response.content { + match content { + context_server::types::ToolResponseContent::Text { text } => { + result.push_str(&text); + } + context_server::types::ToolResponseContent::Image { .. } => { + log::warn!("Ignoring image content from tool response"); + } + context_server::types::ToolResponseContent::Audio { .. 
} => { + log::warn!("Ignoring audio content from tool response"); + } + context_server::types::ToolResponseContent::Resource { .. } => { + log::warn!("Ignoring resource content from tool response"); + } + } + } + Ok(AgentToolOutput { + raw_output: result.clone().into(), + llm_output: result.into(), + }) + }) + } + + fn replay( + &self, + _input: serde_json::Value, + _output: serde_json::Value, + _event_stream: ToolCallEventStream, + _cx: &mut App, + ) -> Result<()> { + Ok(()) + } +} diff --git a/crates/agent2/src/tools/copy_path_tool.rs b/crates/agent2/src/tools/copy_path_tool.rs new file mode 100644 index 0000000000000000000000000000000000000000..4b40a9842f69cb87977ae948c4f858a2540b2eb1 --- /dev/null +++ b/crates/agent2/src/tools/copy_path_tool.rs @@ -0,0 +1,111 @@ +use crate::{AgentTool, ToolCallEventStream}; +use agent_client_protocol::ToolKind; +use anyhow::{Context as _, Result, anyhow}; +use gpui::{App, AppContext, Entity, SharedString, Task}; +use project::Project; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use util::markdown::MarkdownInlineCode; + +/// Copies a file or directory in the project, and returns confirmation that the copy succeeded. +/// Directory contents will be copied recursively (like `cp -r`). +/// +/// This tool should be used when it's desirable to create a copy of a file or directory without modifying the original. +/// It's much more efficient than doing this by separately reading and then writing the file or directory's contents, so this tool should be preferred over that approach whenever copying is the goal. +#[derive(Debug, Serialize, Deserialize, JsonSchema)] +pub struct CopyPathToolInput { + /// The source path of the file or directory to copy. + /// If a directory is specified, its contents will be copied recursively (like `cp -r`). + /// + /// + /// If the project has the following files: + /// + /// - directory1/a/something.txt + /// - directory2/a/things.txt + /// - directory3/a/other.txt + /// + /// You can copy the first file by providing a source_path of "directory1/a/something.txt" + /// + pub source_path: String, + /// The destination path where the file or directory should be copied to. 
+ /// + /// + /// To copy "directory1/a/something.txt" to "directory2/b/copy.txt", provide a destination_path of "directory2/b/copy.txt" + /// + pub destination_path: String, +} + +pub struct CopyPathTool { + project: Entity, +} + +impl CopyPathTool { + pub fn new(project: Entity) -> Self { + Self { project } + } +} + +impl AgentTool for CopyPathTool { + type Input = CopyPathToolInput; + type Output = String; + + fn name(&self) -> SharedString { + "copy_path".into() + } + + fn kind(&self) -> ToolKind { + ToolKind::Move + } + + fn initial_title(&self, input: Result) -> ui::SharedString { + if let Ok(input) = input { + let src = MarkdownInlineCode(&input.source_path); + let dest = MarkdownInlineCode(&input.destination_path); + format!("Copy {src} to {dest}").into() + } else { + "Copy path".into() + } + } + + fn run( + self: Arc, + input: Self::Input, + _event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + let copy_task = self.project.update(cx, |project, cx| { + match project + .find_project_path(&input.source_path, cx) + .and_then(|project_path| project.entry_for_path(&project_path, cx)) + { + Some(entity) => match project.find_project_path(&input.destination_path, cx) { + Some(project_path) => { + project.copy_entry(entity.id, None, project_path.path, cx) + } + None => Task::ready(Err(anyhow!( + "Destination path {} was outside the project.", + input.destination_path + ))), + }, + None => Task::ready(Err(anyhow!( + "Source path {} was not found in the project.", + input.source_path + ))), + } + }); + + cx.background_spawn(async move { + let _ = copy_task.await.with_context(|| { + format!( + "Copying {} to {}", + input.source_path, input.destination_path + ) + })?; + Ok(format!( + "Copied {} to {}", + input.source_path, input.destination_path + )) + }) + } +} diff --git a/crates/agent2/src/tools/create_directory_tool.rs b/crates/agent2/src/tools/create_directory_tool.rs new file mode 100644 index 0000000000000000000000000000000000000000..7720eb3595d475560b90cc890396b8f6b27600b1 --- /dev/null +++ b/crates/agent2/src/tools/create_directory_tool.rs @@ -0,0 +1,86 @@ +use agent_client_protocol::ToolKind; +use anyhow::{Context as _, Result, anyhow}; +use gpui::{App, Entity, SharedString, Task}; +use project::Project; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use util::markdown::MarkdownInlineCode; + +use crate::{AgentTool, ToolCallEventStream}; + +/// Creates a new directory at the specified path within the project. Returns confirmation that the directory was created. +/// +/// This tool creates a directory and all necessary parent directories (similar to `mkdir -p`). It should be used whenever you need to create new directories within the project. +#[derive(Debug, Serialize, Deserialize, JsonSchema)] +pub struct CreateDirectoryToolInput { + /// The path of the new directory. 
+ /// + /// + /// If the project has the following structure: + /// + /// - directory1/ + /// - directory2/ + /// + /// You can create a new directory by providing a path of "directory1/new_directory" + /// + pub path: String, +} + +pub struct CreateDirectoryTool { + project: Entity, +} + +impl CreateDirectoryTool { + pub fn new(project: Entity) -> Self { + Self { project } + } +} + +impl AgentTool for CreateDirectoryTool { + type Input = CreateDirectoryToolInput; + type Output = String; + + fn name(&self) -> SharedString { + "create_directory".into() + } + + fn kind(&self) -> ToolKind { + ToolKind::Read + } + + fn initial_title(&self, input: Result) -> SharedString { + if let Ok(input) = input { + format!("Create directory {}", MarkdownInlineCode(&input.path)).into() + } else { + "Create directory".into() + } + } + + fn run( + self: Arc, + input: Self::Input, + _event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + let project_path = match self.project.read(cx).find_project_path(&input.path, cx) { + Some(project_path) => project_path, + None => { + return Task::ready(Err(anyhow!("Path to create was outside the project"))); + } + }; + let destination_path: Arc = input.path.as_str().into(); + + let create_entry = self.project.update(cx, |project, cx| { + project.create_entry(project_path.clone(), true, cx) + }); + + cx.spawn(async move |_cx| { + create_entry + .await + .with_context(|| format!("Creating directory {destination_path}"))?; + + Ok(format!("Created directory {destination_path}")) + }) + } +} diff --git a/crates/agent2/src/tools/delete_path_tool.rs b/crates/agent2/src/tools/delete_path_tool.rs new file mode 100644 index 0000000000000000000000000000000000000000..c281f1b5b69e2f3cbc3282a17298e9002f3b7c52 --- /dev/null +++ b/crates/agent2/src/tools/delete_path_tool.rs @@ -0,0 +1,136 @@ +use crate::{AgentTool, ToolCallEventStream}; +use action_log::ActionLog; +use agent_client_protocol::ToolKind; +use anyhow::{Context as _, Result, anyhow}; +use futures::{SinkExt, StreamExt, channel::mpsc}; +use gpui::{App, AppContext, Entity, SharedString, Task}; +use project::{Project, ProjectPath}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; + +/// Deletes the file or directory (and the directory's contents, recursively) at the specified path in the project, and returns confirmation of the deletion. +#[derive(Debug, Serialize, Deserialize, JsonSchema)] +pub struct DeletePathToolInput { + /// The path of the file or directory to delete. 
+ /// + /// + /// If the project has the following files: + /// + /// - directory1/a/something.txt + /// - directory2/a/things.txt + /// - directory3/a/other.txt + /// + /// You can delete the first file by providing a path of "directory1/a/something.txt" + /// + pub path: String, +} + +pub struct DeletePathTool { + project: Entity, + action_log: Entity, +} + +impl DeletePathTool { + pub fn new(project: Entity, action_log: Entity) -> Self { + Self { + project, + action_log, + } + } +} + +impl AgentTool for DeletePathTool { + type Input = DeletePathToolInput; + type Output = String; + + fn name(&self) -> SharedString { + "delete_path".into() + } + + fn kind(&self) -> ToolKind { + ToolKind::Delete + } + + fn initial_title(&self, input: Result) -> SharedString { + if let Ok(input) = input { + format!("Delete “`{}`”", input.path).into() + } else { + "Delete path".into() + } + } + + fn run( + self: Arc, + input: Self::Input, + _event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + let path = input.path; + let Some(project_path) = self.project.read(cx).find_project_path(&path, cx) else { + return Task::ready(Err(anyhow!( + "Couldn't delete {path} because that path isn't in this project." + ))); + }; + + let Some(worktree) = self + .project + .read(cx) + .worktree_for_id(project_path.worktree_id, cx) + else { + return Task::ready(Err(anyhow!( + "Couldn't delete {path} because that path isn't in this project." + ))); + }; + + let worktree_snapshot = worktree.read(cx).snapshot(); + let (mut paths_tx, mut paths_rx) = mpsc::channel(256); + cx.background_spawn({ + let project_path = project_path.clone(); + async move { + for entry in + worktree_snapshot.traverse_from_path(true, false, false, &project_path.path) + { + if !entry.path.starts_with(&project_path.path) { + break; + } + paths_tx + .send(ProjectPath { + worktree_id: project_path.worktree_id, + path: entry.path.clone(), + }) + .await?; + } + anyhow::Ok(()) + } + }) + .detach(); + + let project = self.project.clone(); + let action_log = self.action_log.clone(); + cx.spawn(async move |cx| { + while let Some(path) = paths_rx.next().await { + if let Ok(buffer) = project + .update(cx, |project, cx| project.open_buffer(path, cx))? + .await + { + action_log.update(cx, |action_log, cx| { + action_log.will_delete_buffer(buffer.clone(), cx) + })?; + } + } + + let deletion_task = project + .update(cx, |project, cx| { + project.delete_file(project_path, false, cx) + })? + .with_context(|| { + format!("Couldn't delete {path} because that path isn't in this project.") + })?; + deletion_task + .await + .with_context(|| format!("Deleting {path}"))?; + Ok(format!("Deleted {path}")) + }) + } +} diff --git a/crates/agent2/src/tools/diagnostics_tool.rs b/crates/agent2/src/tools/diagnostics_tool.rs new file mode 100644 index 0000000000000000000000000000000000000000..6ba8b7b377a770fa3af35b725b4427e7102d70c1 --- /dev/null +++ b/crates/agent2/src/tools/diagnostics_tool.rs @@ -0,0 +1,163 @@ +use crate::{AgentTool, ToolCallEventStream}; +use agent_client_protocol as acp; +use anyhow::{Result, anyhow}; +use gpui::{App, Entity, Task}; +use language::{DiagnosticSeverity, OffsetRangeExt}; +use project::Project; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use std::{fmt::Write, path::Path, sync::Arc}; +use ui::SharedString; +use util::markdown::MarkdownInlineCode; + +/// Get errors and warnings for the project or a specific file. 
+/// +/// This tool can be invoked after a series of edits to determine if further edits are necessary, or if the user asks to fix errors or warnings in their codebase. +/// +/// When a path is provided, shows all diagnostics for that specific file. +/// When no path is provided, shows a summary of error and warning counts for all files in the project. +/// +/// +/// To get diagnostics for a specific file: +/// { +/// "path": "src/main.rs" +/// } +/// +/// To get a project-wide diagnostic summary: +/// {} +/// +/// +/// +/// - If you think you can fix a diagnostic, make 1-2 attempts and then give up. +/// - Don't remove code you've generated just because you can't fix an error. The user can help you fix it. +/// +#[derive(Debug, Serialize, Deserialize, JsonSchema)] +pub struct DiagnosticsToolInput { + /// The path to get diagnostics for. If not provided, returns a project-wide summary. + /// + /// This path should never be absolute, and the first component + /// of the path should always be a root directory in a project. + /// + /// + /// If the project has the following root directories: + /// + /// - lorem + /// - ipsum + /// + /// If you wanna access diagnostics for `dolor.txt` in `ipsum`, you should use the path `ipsum/dolor.txt`. + /// + pub path: Option, +} + +pub struct DiagnosticsTool { + project: Entity, +} + +impl DiagnosticsTool { + pub fn new(project: Entity) -> Self { + Self { project } + } +} + +impl AgentTool for DiagnosticsTool { + type Input = DiagnosticsToolInput; + type Output = String; + + fn name(&self) -> SharedString { + "diagnostics".into() + } + + fn kind(&self) -> acp::ToolKind { + acp::ToolKind::Read + } + + fn initial_title(&self, input: Result) -> SharedString { + if let Some(path) = input.ok().and_then(|input| match input.path { + Some(path) if !path.is_empty() => Some(path), + _ => None, + }) { + format!("Check diagnostics for {}", MarkdownInlineCode(&path)).into() + } else { + "Check project diagnostics".into() + } + } + + fn run( + self: Arc, + input: Self::Input, + _event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + match input.path { + Some(path) if !path.is_empty() => { + let Some(project_path) = self.project.read(cx).find_project_path(&path, cx) else { + return Task::ready(Err(anyhow!("Could not find path {path} in project",))); + }; + + let buffer = self + .project + .update(cx, |project, cx| project.open_buffer(project_path, cx)); + + cx.spawn(async move |cx| { + let mut output = String::new(); + let buffer = buffer.await?; + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?; + + for (_, group) in snapshot.diagnostic_groups(None) { + let entry = &group.entries[group.primary_ix]; + let range = entry.range.to_point(&snapshot); + let severity = match entry.diagnostic.severity { + DiagnosticSeverity::ERROR => "error", + DiagnosticSeverity::WARNING => "warning", + _ => continue, + }; + + writeln!( + output, + "{} at line {}: {}", + severity, + range.start.row + 1, + entry.diagnostic.message + )?; + } + + if output.is_empty() { + Ok("File doesn't have errors or warnings!".to_string()) + } else { + Ok(output) + } + }) + } + _ => { + let project = self.project.read(cx); + let mut output = String::new(); + let mut has_diagnostics = false; + + for (project_path, _, summary) in project.diagnostic_summaries(true, cx) { + if summary.error_count > 0 || summary.warning_count > 0 { + let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) + else { + continue; + }; + + has_diagnostics = true; + 
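+                    // Append one summary line per file, e.g. "crates/foo/src/lib.rs: 2 error(s), 1 warning(s)".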
output.push_str(&format!( + "{}: {} error(s), {} warning(s)\n", + Path::new(worktree.read(cx).root_name()) + .join(project_path.path) + .display(), + summary.error_count, + summary.warning_count + )); + } + } + + if has_diagnostics { + Task::ready(Ok(output)) + } else { + Task::ready(Ok("No errors or warnings found in the project.".into())) + } + } + } + } +} diff --git a/crates/agent2/src/tools/edit_file_tool.rs b/crates/agent2/src/tools/edit_file_tool.rs new file mode 100644 index 0000000000000000000000000000000000000000..f89cace9a84e1f9a877daf9a60503b8d78c8c336 --- /dev/null +++ b/crates/agent2/src/tools/edit_file_tool.rs @@ -0,0 +1,1575 @@ +use crate::{AgentTool, Thread, ToolCallEventStream}; +use acp_thread::Diff; +use agent_client_protocol::{self as acp, ToolCallLocation, ToolCallUpdateFields}; +use anyhow::{Context as _, Result, anyhow}; +use assistant_tools::edit_agent::{EditAgent, EditAgentOutput, EditAgentOutputEvent, EditFormat}; +use cloud_llm_client::CompletionIntent; +use collections::HashSet; +use gpui::{App, AppContext, AsyncApp, Entity, Task, WeakEntity}; +use indoc::formatdoc; +use language::language_settings::{self, FormatOnSave}; +use language::{LanguageRegistry, ToPoint}; +use language_model::LanguageModelToolResultContent; +use paths; +use project::lsp_store::{FormatTrigger, LspFormatTarget}; +use project::{Project, ProjectPath}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use settings::Settings; +use smol::stream::StreamExt as _; +use std::path::{Path, PathBuf}; +use std::sync::Arc; +use ui::SharedString; +use util::ResultExt; + +const DEFAULT_UI_TEXT: &str = "Editing file"; + +/// This is a tool for creating a new file or editing an existing file. For moving or renaming files, you should generally use the `terminal` tool with the 'mv' command instead. +/// +/// Before using this tool: +/// +/// 1. Use the `read_file` tool to understand the file's contents and context +/// +/// 2. Verify the directory path is correct (only applicable when creating new files): +/// - Use the `list_directory` tool to verify the parent directory exists and is the correct location +#[derive(Debug, Serialize, Deserialize, JsonSchema)] +pub struct EditFileToolInput { + /// A one-line, user-friendly markdown description of the edit. This will be shown in the UI and also passed to another model to perform the edit. + /// + /// Be terse, but also descriptive in what you want to achieve with this edit. Avoid generic instructions. + /// + /// NEVER mention the file path in this description. + /// + /// Fix API endpoint URLs + /// Update copyright year in `page_footer` + /// + /// Make sure to include this field before all the others in the input object so that we can display it immediately. + pub display_description: String, + + /// The full path of the file to create or modify in the project. + /// + /// WARNING: When specifying which file path need changing, you MUST start each path with one of the project's root directories. + /// + /// The following examples assume we have two root directories in the project: + /// - /a/b/backend + /// - /c/d/frontend + /// + /// + /// `backend/src/main.rs` + /// + /// Notice how the file path starts with `backend`. Without that, the path would be ambiguous and the call would fail! + /// + /// + /// + /// `frontend/db.js` + /// + pub path: PathBuf, + /// The mode of operation on the file. Possible values: + /// - 'edit': Make granular edits to an existing file. + /// - 'create': Create a new file if it doesn't exist. 
+ /// - 'overwrite': Replace the entire contents of an existing file. + /// + /// When a file already exists or you just created it, prefer editing it as opposed to recreating it from scratch. + pub mode: EditFileMode, +} + +#[derive(Debug, Serialize, Deserialize, JsonSchema)] +struct EditFileToolPartialInput { + #[serde(default)] + path: String, + #[serde(default)] + display_description: String, +} + +#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] +#[serde(rename_all = "lowercase")] +pub enum EditFileMode { + Edit, + Create, + Overwrite, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct EditFileToolOutput { + #[serde(alias = "original_path")] + input_path: PathBuf, + new_text: String, + old_text: Arc, + #[serde(default)] + diff: String, + #[serde(alias = "raw_output")] + edit_agent_output: EditAgentOutput, +} + +impl From for LanguageModelToolResultContent { + fn from(output: EditFileToolOutput) -> Self { + if output.diff.is_empty() { + "No edits were made.".into() + } else { + format!( + "Edited {}:\n\n```diff\n{}\n```", + output.input_path.display(), + output.diff + ) + .into() + } + } +} + +pub struct EditFileTool { + thread: WeakEntity, + language_registry: Arc, +} + +impl EditFileTool { + pub fn new(thread: WeakEntity, language_registry: Arc) -> Self { + Self { + thread, + language_registry, + } + } + + fn authorize( + &self, + input: &EditFileToolInput, + event_stream: &ToolCallEventStream, + cx: &mut App, + ) -> Task> { + if agent_settings::AgentSettings::get_global(cx).always_allow_tool_actions { + return Task::ready(Ok(())); + } + + // If any path component matches the local settings folder, then this could affect + // the editor in ways beyond the project source, so prompt. + let local_settings_folder = paths::local_settings_folder_relative_path(); + let path = Path::new(&input.path); + if path + .components() + .any(|component| component.as_os_str() == local_settings_folder.as_os_str()) + { + return event_stream.authorize( + format!("{} (local settings)", input.display_description), + cx, + ); + } + + // It's also possible that the global config dir is configured to be inside the project, + // so check for that edge case too. + if let Ok(canonical_path) = std::fs::canonicalize(&input.path) + && canonical_path.starts_with(paths::config_dir()) + { + return event_stream.authorize( + format!("{} (global settings)", input.display_description), + cx, + ); + } + + // Check if path is inside the global config directory + // First check if it's already inside project - if not, try to canonicalize + let Ok(project_path) = self.thread.read_with(cx, |thread, cx| { + thread.project().read(cx).find_project_path(&input.path, cx) + }) else { + return Task::ready(Err(anyhow!("thread was dropped"))); + }; + + // If the path is inside the project, and it's not one of the above edge cases, + // then no confirmation is necessary. Otherwise, confirmation is necessary. 
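+        // For example, "root/src/main.rs" resolves to a project path and is edited
+        // without prompting, while a path like "/etc/hosts" is outside the project
+        // and requires the user's explicit approval.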
+ if project_path.is_some() { + Task::ready(Ok(())) + } else { + event_stream.authorize(&input.display_description, cx) + } + } +} + +impl AgentTool for EditFileTool { + type Input = EditFileToolInput; + type Output = EditFileToolOutput; + + fn name(&self) -> SharedString { + "edit_file".into() + } + + fn kind(&self) -> acp::ToolKind { + acp::ToolKind::Edit + } + + fn initial_title(&self, input: Result) -> SharedString { + match input { + Ok(input) => input.display_description.into(), + Err(raw_input) => { + if let Some(input) = + serde_json::from_value::(raw_input).ok() + { + let description = input.display_description.trim(); + if !description.is_empty() { + return description.to_string().into(); + } + + let path = input.path.trim().to_string(); + if !path.is_empty() { + return path.into(); + } + } + + DEFAULT_UI_TEXT.into() + } + } + } + + fn run( + self: Arc, + input: Self::Input, + event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + let Ok(project) = self + .thread + .read_with(cx, |thread, _cx| thread.project().clone()) + else { + return Task::ready(Err(anyhow!("thread was dropped"))); + }; + let project_path = match resolve_path(&input, project.clone(), cx) { + Ok(path) => path, + Err(err) => return Task::ready(Err(anyhow!(err))), + }; + let abs_path = project.read(cx).absolute_path(&project_path, cx); + if let Some(abs_path) = abs_path.clone() { + event_stream.update_fields(ToolCallUpdateFields { + locations: Some(vec![acp::ToolCallLocation { + path: abs_path, + line: None, + }]), + ..Default::default() + }); + } + + let authorize = self.authorize(&input, &event_stream, cx); + cx.spawn(async move |cx: &mut AsyncApp| { + authorize.await?; + + let (request, model, action_log) = self.thread.update(cx, |thread, cx| { + let request = thread.build_completion_request(CompletionIntent::ToolResults, cx); + (request, thread.model().cloned(), thread.action_log().clone()) + })?; + let request = request?; + let model = model.context("No language model configured")?; + + let edit_format = EditFormat::from_model(model.clone())?; + let edit_agent = EditAgent::new( + model, + project.clone(), + action_log.clone(), + // TODO: move edit agent to this crate so we can use our templates + assistant_tools::templates::Templates::new(), + edit_format, + ); + + let buffer = project + .update(cx, |project, cx| { + project.open_buffer(project_path.clone(), cx) + })? 
+ .await?; + + let diff = cx.new(|cx| Diff::new(buffer.clone(), cx))?; + event_stream.update_diff(diff.clone()); + + let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?; + let old_text = cx + .background_spawn({ + let old_snapshot = old_snapshot.clone(); + async move { Arc::new(old_snapshot.text()) } + }) + .await; + + + let (output, mut events) = if matches!(input.mode, EditFileMode::Edit) { + edit_agent.edit( + buffer.clone(), + input.display_description.clone(), + &request, + cx, + ) + } else { + edit_agent.overwrite( + buffer.clone(), + input.display_description.clone(), + &request, + cx, + ) + }; + + let mut hallucinated_old_text = false; + let mut ambiguous_ranges = Vec::new(); + let mut emitted_location = false; + while let Some(event) = events.next().await { + match event { + EditAgentOutputEvent::Edited(range) => { + if !emitted_location { + let line = buffer.update(cx, |buffer, _cx| { + range.start.to_point(&buffer.snapshot()).row + }).ok(); + if let Some(abs_path) = abs_path.clone() { + event_stream.update_fields(ToolCallUpdateFields { + locations: Some(vec![ToolCallLocation { path: abs_path, line }]), + ..Default::default() + }); + } + emitted_location = true; + } + }, + EditAgentOutputEvent::UnresolvedEditRange => hallucinated_old_text = true, + EditAgentOutputEvent::AmbiguousEditRange(ranges) => ambiguous_ranges = ranges, + EditAgentOutputEvent::ResolvingEditRange(range) => { + diff.update(cx, |card, cx| card.reveal_range(range.clone(), cx))?; + // if !emitted_location { + // let line = buffer.update(cx, |buffer, _cx| { + // range.start.to_point(&buffer.snapshot()).row + // }).ok(); + // if let Some(abs_path) = abs_path.clone() { + // event_stream.update_fields(ToolCallUpdateFields { + // locations: Some(vec![ToolCallLocation { path: abs_path, line }]), + // ..Default::default() + // }); + // } + // } + } + } + } + + // If format_on_save is enabled, format the buffer + let format_on_save_enabled = buffer + .read_with(cx, |buffer, cx| { + let settings = language_settings::language_settings( + buffer.language().map(|l| l.name()), + buffer.file(), + cx, + ); + settings.format_on_save != FormatOnSave::Off + }) + .unwrap_or(false); + + let edit_agent_output = output.await?; + + if format_on_save_enabled { + action_log.update(cx, |log, cx| { + log.buffer_edited(buffer.clone(), cx); + })?; + + let format_task = project.update(cx, |project, cx| { + project.format( + HashSet::from_iter([buffer.clone()]), + LspFormatTarget::Buffers, + false, // Don't push to history since the tool did it. + FormatTrigger::Save, + cx, + ) + })?; + format_task.await.log_err(); + } + + project + .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))? + .await?; + + action_log.update(cx, |log, cx| { + log.buffer_edited(buffer.clone(), cx); + })?; + + let new_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?; + let (new_text, unified_diff) = cx + .background_spawn({ + let new_snapshot = new_snapshot.clone(); + let old_text = old_text.clone(); + async move { + let new_text = new_snapshot.text(); + let diff = language::unified_diff(&old_text, &new_text); + (new_text, diff) + } + }) + .await; + + diff.update(cx, |diff, cx| diff.finalize(cx)).ok(); + + let input_path = input.path.display(); + if unified_diff.is_empty() { + anyhow::ensure!( + !hallucinated_old_text, + formatdoc! {" + Some edits were produced but none of them could be applied. + Read the relevant sections of {input_path} again so that + I can perform the requested edits. 
+ "} + ); + anyhow::ensure!( + ambiguous_ranges.is_empty(), + { + let line_numbers = ambiguous_ranges + .iter() + .map(|range| range.start.to_string()) + .collect::>() + .join(", "); + formatdoc! {" + matches more than one position in the file (lines: {line_numbers}). Read the + relevant sections of {input_path} again and extend so + that I can perform the requested edits. + "} + } + ); + } + + Ok(EditFileToolOutput { + input_path: input.path, + new_text, + old_text, + diff: unified_diff, + edit_agent_output, + }) + }) + } + + fn replay( + &self, + _input: Self::Input, + output: Self::Output, + event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Result<()> { + event_stream.update_diff(cx.new(|cx| { + Diff::finalized( + output.input_path, + Some(output.old_text.to_string()), + output.new_text, + self.language_registry.clone(), + cx, + ) + })); + Ok(()) + } +} + +/// Validate that the file path is valid, meaning: +/// +/// - For `edit` and `overwrite`, the path must point to an existing file. +/// - For `create`, the file must not already exist, but it's parent dir must exist. +fn resolve_path( + input: &EditFileToolInput, + project: Entity, + cx: &mut App, +) -> Result { + let project = project.read(cx); + + match input.mode { + EditFileMode::Edit | EditFileMode::Overwrite => { + let path = project + .find_project_path(&input.path, cx) + .context("Can't edit file: path not found")?; + + let entry = project + .entry_for_path(&path, cx) + .context("Can't edit file: path not found")?; + + anyhow::ensure!(entry.is_file(), "Can't edit file: path is a directory"); + Ok(path) + } + + EditFileMode::Create => { + if let Some(path) = project.find_project_path(&input.path, cx) { + anyhow::ensure!( + project.entry_for_path(&path, cx).is_none(), + "Can't create file: file already exists" + ); + } + + let parent_path = input + .path + .parent() + .context("Can't create file: incorrect path")?; + + let parent_project_path = project.find_project_path(&parent_path, cx); + + let parent_entry = parent_project_path + .as_ref() + .and_then(|path| project.entry_for_path(path, cx)) + .context("Can't create file: parent directory doesn't exist")?; + + anyhow::ensure!( + parent_entry.is_dir(), + "Can't create file: parent is not a directory" + ); + + let file_name = input + .path + .file_name() + .context("Can't create file: invalid filename")?; + + let new_file_path = parent_project_path.map(|parent| ProjectPath { + path: Arc::from(parent.path.join(file_name)), + ..parent + }); + + new_file_path.context("Can't create file") + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ContextServerRegistry, Templates}; + use action_log::ActionLog; + use client::TelemetrySettings; + use fs::Fs; + use gpui::{TestAppContext, UpdateGlobal}; + use language_model::fake_provider::FakeLanguageModel; + use prompt_store::ProjectContext; + use serde_json::json; + use settings::SettingsStore; + use util::path; + + #[gpui::test] + async fn test_edit_nonexistent_file(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree("/root", json!({})).await; + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let model = Arc::new(FakeLanguageModel::default()); + let 
thread = cx.new(|cx| { + Thread::new( + project, + cx.new(|_cx| ProjectContext::default()), + context_server_registry, + action_log, + Templates::new(), + Some(model), + cx, + ) + }); + let result = cx + .update(|cx| { + let input = EditFileToolInput { + display_description: "Some edit".into(), + path: "root/nonexistent_file.txt".into(), + mode: EditFileMode::Edit, + }; + Arc::new(EditFileTool::new(thread.downgrade(), language_registry)).run( + input, + ToolCallEventStream::test().0, + cx, + ) + }) + .await; + assert_eq!( + result.unwrap_err().to_string(), + "Can't edit file: path not found" + ); + } + + #[gpui::test] + async fn test_resolve_path_for_creating_file(cx: &mut TestAppContext) { + let mode = &EditFileMode::Create; + + let result = test_resolve_path(mode, "root/new.txt", cx); + assert_resolved_path_eq(result.await, "new.txt"); + + let result = test_resolve_path(mode, "new.txt", cx); + assert_resolved_path_eq(result.await, "new.txt"); + + let result = test_resolve_path(mode, "dir/new.txt", cx); + assert_resolved_path_eq(result.await, "dir/new.txt"); + + let result = test_resolve_path(mode, "root/dir/subdir/existing.txt", cx); + assert_eq!( + result.await.unwrap_err().to_string(), + "Can't create file: file already exists" + ); + + let result = test_resolve_path(mode, "root/dir/nonexistent_dir/new.txt", cx); + assert_eq!( + result.await.unwrap_err().to_string(), + "Can't create file: parent directory doesn't exist" + ); + } + + #[gpui::test] + async fn test_resolve_path_for_editing_file(cx: &mut TestAppContext) { + let mode = &EditFileMode::Edit; + + let path_with_root = "root/dir/subdir/existing.txt"; + let path_without_root = "dir/subdir/existing.txt"; + let result = test_resolve_path(mode, path_with_root, cx); + assert_resolved_path_eq(result.await, path_without_root); + + let result = test_resolve_path(mode, path_without_root, cx); + assert_resolved_path_eq(result.await, path_without_root); + + let result = test_resolve_path(mode, "root/nonexistent.txt", cx); + assert_eq!( + result.await.unwrap_err().to_string(), + "Can't edit file: path not found" + ); + + let result = test_resolve_path(mode, "root/dir", cx); + assert_eq!( + result.await.unwrap_err().to_string(), + "Can't edit file: path is a directory" + ); + } + + async fn test_resolve_path( + mode: &EditFileMode, + path: &str, + cx: &mut TestAppContext, + ) -> anyhow::Result { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "dir": { + "subdir": { + "existing.txt": "hello" + } + } + }), + ) + .await; + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + + let input = EditFileToolInput { + display_description: "Some edit".into(), + path: path.into(), + mode: mode.clone(), + }; + + cx.update(|cx| resolve_path(&input, project, cx)) + } + + fn assert_resolved_path_eq(path: anyhow::Result, expected: &str) { + let actual = path + .expect("Should return valid path") + .path + .to_str() + .unwrap() + .replace("\\", "/"); // Naive Windows paths normalization + assert_eq!(actual, expected); + } + + #[gpui::test] + async fn test_format_on_save(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree("/root", json!({"src": {}})).await; + + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + + // Set up a Rust language with LSP formatting support + let rust_language = Arc::new(language::Language::new( + language::LanguageConfig { + name: "Rust".into(), + matcher: 
language::LanguageMatcher { + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + ..Default::default() + }, + None, + )); + + // Register the language and fake LSP + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(rust_language); + + let mut fake_language_servers = language_registry.register_fake_lsp( + "Rust", + language::FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + document_formatting_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + ..Default::default() + }, + ); + + // Create the file + fs.save( + path!("/root/src/main.rs").as_ref(), + &"initial content".into(), + language::LineEnding::Unix, + ) + .await + .unwrap(); + + // Open the buffer to trigger LSP initialization + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/root/src/main.rs"), cx) + }) + .await + .unwrap(); + + // Register the buffer with language servers + let _handle = project.update(cx, |project, cx| { + project.register_buffer_with_language_servers(&buffer, cx) + }); + + const UNFORMATTED_CONTENT: &str = "fn main() {println!(\"Hello!\");}\n"; + const FORMATTED_CONTENT: &str = + "This file was formatted by the fake formatter in the test.\n"; + + // Get the fake language server and set up formatting handler + let fake_language_server = fake_language_servers.next().await.unwrap(); + fake_language_server.set_request_handler::({ + |_, _| async move { + Ok(Some(vec![lsp::TextEdit { + range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(1, 0)), + new_text: FORMATTED_CONTENT.to_string(), + }])) + } + }); + + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let model = Arc::new(FakeLanguageModel::default()); + let thread = cx.new(|cx| { + Thread::new( + project, + cx.new(|_cx| ProjectContext::default()), + context_server_registry, + action_log.clone(), + Templates::new(), + Some(model.clone()), + cx, + ) + }); + + // First, test with format_on_save enabled + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::( + cx, + |settings| { + settings.defaults.format_on_save = Some(FormatOnSave::On); + settings.defaults.formatter = + Some(language::language_settings::SelectedFormatter::Auto); + }, + ); + }); + }); + + // Have the model stream unformatted content + let edit_result = { + let edit_task = cx.update(|cx| { + let input = EditFileToolInput { + display_description: "Create main function".into(), + path: "root/src/main.rs".into(), + mode: EditFileMode::Overwrite, + }; + Arc::new(EditFileTool::new( + thread.downgrade(), + language_registry.clone(), + )) + .run(input, ToolCallEventStream::test().0, cx) + }); + + // Stream the unformatted content + cx.executor().run_until_parked(); + model.send_last_completion_stream_text_chunk(UNFORMATTED_CONTENT.to_string()); + model.end_last_completion_stream(); + + edit_task.await + }; + assert!(edit_result.is_ok()); + + // Wait for any async operations (e.g. 
formatting) to complete + cx.executor().run_until_parked(); + + // Read the file to verify it was formatted automatically + let new_content = fs.load(path!("/root/src/main.rs").as_ref()).await.unwrap(); + assert_eq!( + // Ignore carriage returns on Windows + new_content.replace("\r\n", "\n"), + FORMATTED_CONTENT, + "Code should be formatted when format_on_save is enabled" + ); + + let stale_buffer_count = action_log.read_with(cx, |log, cx| log.stale_buffers(cx).count()); + + assert_eq!( + stale_buffer_count, 0, + "BUG: Buffer is incorrectly marked as stale after format-on-save. Found {} stale buffers. \ + This causes the agent to think the file was modified externally when it was just formatted.", + stale_buffer_count + ); + + // Next, test with format_on_save disabled + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::( + cx, + |settings| { + settings.defaults.format_on_save = Some(FormatOnSave::Off); + }, + ); + }); + }); + + // Stream unformatted edits again + let edit_result = { + let edit_task = cx.update(|cx| { + let input = EditFileToolInput { + display_description: "Update main function".into(), + path: "root/src/main.rs".into(), + mode: EditFileMode::Overwrite, + }; + Arc::new(EditFileTool::new(thread.downgrade(), language_registry)).run( + input, + ToolCallEventStream::test().0, + cx, + ) + }); + + // Stream the unformatted content + cx.executor().run_until_parked(); + model.send_last_completion_stream_text_chunk(UNFORMATTED_CONTENT.to_string()); + model.end_last_completion_stream(); + + edit_task.await + }; + assert!(edit_result.is_ok()); + + // Wait for any async operations (e.g. formatting) to complete + cx.executor().run_until_parked(); + + // Verify the file was not formatted + let new_content = fs.load(path!("/root/src/main.rs").as_ref()).await.unwrap(); + assert_eq!( + // Ignore carriage returns on Windows + new_content.replace("\r\n", "\n"), + UNFORMATTED_CONTENT, + "Code should not be formatted when format_on_save is disabled" + ); + } + + #[gpui::test] + async fn test_remove_trailing_whitespace(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree("/root", json!({"src": {}})).await; + + // Create a simple file with trailing whitespace + fs.save( + path!("/root/src/main.rs").as_ref(), + &"initial content".into(), + language::LineEnding::Unix, + ) + .await + .unwrap(); + + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let model = Arc::new(FakeLanguageModel::default()); + let thread = cx.new(|cx| { + Thread::new( + project, + cx.new(|_cx| ProjectContext::default()), + context_server_registry, + action_log.clone(), + Templates::new(), + Some(model.clone()), + cx, + ) + }); + + // First, test with remove_trailing_whitespace_on_save enabled + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::( + cx, + |settings| { + settings.defaults.remove_trailing_whitespace_on_save = Some(true); + }, + ); + }); + }); + + const CONTENT_WITH_TRAILING_WHITESPACE: &str = + "fn main() { \n println!(\"Hello!\"); \n}\n"; + + // Have the model stream content that contains trailing whitespace + let edit_result = { + let edit_task = cx.update(|cx| 
{ + let input = EditFileToolInput { + display_description: "Create main function".into(), + path: "root/src/main.rs".into(), + mode: EditFileMode::Overwrite, + }; + Arc::new(EditFileTool::new( + thread.downgrade(), + language_registry.clone(), + )) + .run(input, ToolCallEventStream::test().0, cx) + }); + + // Stream the content with trailing whitespace + cx.executor().run_until_parked(); + model.send_last_completion_stream_text_chunk( + CONTENT_WITH_TRAILING_WHITESPACE.to_string(), + ); + model.end_last_completion_stream(); + + edit_task.await + }; + assert!(edit_result.is_ok()); + + // Wait for any async operations (e.g. formatting) to complete + cx.executor().run_until_parked(); + + // Read the file to verify trailing whitespace was removed automatically + assert_eq!( + // Ignore carriage returns on Windows + fs.load(path!("/root/src/main.rs").as_ref()) + .await + .unwrap() + .replace("\r\n", "\n"), + "fn main() {\n println!(\"Hello!\");\n}\n", + "Trailing whitespace should be removed when remove_trailing_whitespace_on_save is enabled" + ); + + // Next, test with remove_trailing_whitespace_on_save disabled + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::( + cx, + |settings| { + settings.defaults.remove_trailing_whitespace_on_save = Some(false); + }, + ); + }); + }); + + // Stream edits again with trailing whitespace + let edit_result = { + let edit_task = cx.update(|cx| { + let input = EditFileToolInput { + display_description: "Update main function".into(), + path: "root/src/main.rs".into(), + mode: EditFileMode::Overwrite, + }; + Arc::new(EditFileTool::new(thread.downgrade(), language_registry)).run( + input, + ToolCallEventStream::test().0, + cx, + ) + }); + + // Stream the content with trailing whitespace + cx.executor().run_until_parked(); + model.send_last_completion_stream_text_chunk( + CONTENT_WITH_TRAILING_WHITESPACE.to_string(), + ); + model.end_last_completion_stream(); + + edit_task.await + }; + assert!(edit_result.is_ok()); + + // Wait for any async operations (e.g. 
formatting) to complete + cx.executor().run_until_parked(); + + // Verify the file still has trailing whitespace + // Read the file again - it should still have trailing whitespace + let final_content = fs.load(path!("/root/src/main.rs").as_ref()).await.unwrap(); + assert_eq!( + // Ignore carriage returns on Windows + final_content.replace("\r\n", "\n"), + CONTENT_WITH_TRAILING_WHITESPACE, + "Trailing whitespace should remain when remove_trailing_whitespace_on_save is disabled" + ); + } + + #[gpui::test] + async fn test_authorize(cx: &mut TestAppContext) { + init_test(cx); + let fs = project::FakeFs::new(cx.executor()); + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let model = Arc::new(FakeLanguageModel::default()); + let thread = cx.new(|cx| { + Thread::new( + project, + cx.new(|_cx| ProjectContext::default()), + context_server_registry, + action_log.clone(), + Templates::new(), + Some(model.clone()), + cx, + ) + }); + let tool = Arc::new(EditFileTool::new(thread.downgrade(), language_registry)); + fs.insert_tree("/root", json!({})).await; + + // Test 1: Path with .zed component should require confirmation + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let _auth = cx.update(|cx| { + tool.authorize( + &EditFileToolInput { + display_description: "test 1".into(), + path: ".zed/settings.json".into(), + mode: EditFileMode::Edit, + }, + &stream_tx, + cx, + ) + }); + + let event = stream_rx.expect_authorization().await; + assert_eq!( + event.tool_call.fields.title, + Some("test 1 (local settings)".into()) + ); + + // Test 2: Path outside project should require confirmation + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let _auth = cx.update(|cx| { + tool.authorize( + &EditFileToolInput { + display_description: "test 2".into(), + path: "/etc/hosts".into(), + mode: EditFileMode::Edit, + }, + &stream_tx, + cx, + ) + }); + + let event = stream_rx.expect_authorization().await; + assert_eq!(event.tool_call.fields.title, Some("test 2".into())); + + // Test 3: Relative path without .zed should not require confirmation + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + cx.update(|cx| { + tool.authorize( + &EditFileToolInput { + display_description: "test 3".into(), + path: "root/src/main.rs".into(), + mode: EditFileMode::Edit, + }, + &stream_tx, + cx, + ) + }) + .await + .unwrap(); + assert!(stream_rx.try_next().is_err()); + + // Test 4: Path with .zed in the middle should require confirmation + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let _auth = cx.update(|cx| { + tool.authorize( + &EditFileToolInput { + display_description: "test 4".into(), + path: "root/.zed/tasks.json".into(), + mode: EditFileMode::Edit, + }, + &stream_tx, + cx, + ) + }); + let event = stream_rx.expect_authorization().await; + assert_eq!( + event.tool_call.fields.title, + Some("test 4 (local settings)".into()) + ); + + // Test 5: When always_allow_tool_actions is enabled, no confirmation needed + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.always_allow_tool_actions = true; + agent_settings::AgentSettings::override_global(settings, cx); + }); + + let (stream_tx, mut stream_rx) = 
ToolCallEventStream::test(); + cx.update(|cx| { + tool.authorize( + &EditFileToolInput { + display_description: "test 5.1".into(), + path: ".zed/settings.json".into(), + mode: EditFileMode::Edit, + }, + &stream_tx, + cx, + ) + }) + .await + .unwrap(); + assert!(stream_rx.try_next().is_err()); + + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + cx.update(|cx| { + tool.authorize( + &EditFileToolInput { + display_description: "test 5.2".into(), + path: "/etc/hosts".into(), + mode: EditFileMode::Edit, + }, + &stream_tx, + cx, + ) + }) + .await + .unwrap(); + assert!(stream_rx.try_next().is_err()); + } + + #[gpui::test] + async fn test_authorize_global_config(cx: &mut TestAppContext) { + init_test(cx); + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree("/project", json!({})).await; + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let model = Arc::new(FakeLanguageModel::default()); + let thread = cx.new(|cx| { + Thread::new( + project, + cx.new(|_cx| ProjectContext::default()), + context_server_registry, + action_log.clone(), + Templates::new(), + Some(model.clone()), + cx, + ) + }); + let tool = Arc::new(EditFileTool::new(thread.downgrade(), language_registry)); + + // Test global config paths - these should require confirmation if they exist and are outside the project + let test_cases = vec![ + ( + "/etc/hosts", + true, + "System file should require confirmation", + ), + ( + "/usr/local/bin/script", + true, + "System bin file should require confirmation", + ), + ( + "project/normal_file.rs", + false, + "Normal project file should not require confirmation", + ), + ]; + + for (path, should_confirm, description) in test_cases { + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let auth = cx.update(|cx| { + tool.authorize( + &EditFileToolInput { + display_description: "Edit file".into(), + path: path.into(), + mode: EditFileMode::Edit, + }, + &stream_tx, + cx, + ) + }); + + if should_confirm { + stream_rx.expect_authorization().await; + } else { + auth.await.unwrap(); + assert!( + stream_rx.try_next().is_err(), + "Failed for case: {} - path: {} - expected no confirmation but got one", + description, + path + ); + } + } + } + + #[gpui::test] + async fn test_needs_confirmation_with_multiple_worktrees(cx: &mut TestAppContext) { + init_test(cx); + let fs = project::FakeFs::new(cx.executor()); + + // Create multiple worktree directories + fs.insert_tree( + "/workspace/frontend", + json!({ + "src": { + "main.js": "console.log('frontend');" + } + }), + ) + .await; + fs.insert_tree( + "/workspace/backend", + json!({ + "src": { + "main.rs": "fn main() {}" + } + }), + ) + .await; + fs.insert_tree( + "/workspace/shared", + json!({ + ".zed": { + "settings.json": "{}" + } + }), + ) + .await; + + // Create project with multiple worktrees + let project = Project::test( + fs.clone(), + [ + path!("/workspace/frontend").as_ref(), + path!("/workspace/backend").as_ref(), + path!("/workspace/shared").as_ref(), + ], + cx, + ) + .await; + let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let context_server_registry = + cx.new(|cx| 
ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let model = Arc::new(FakeLanguageModel::default()); + let thread = cx.new(|cx| { + Thread::new( + project.clone(), + cx.new(|_cx| ProjectContext::default()), + context_server_registry.clone(), + action_log.clone(), + Templates::new(), + Some(model.clone()), + cx, + ) + }); + let tool = Arc::new(EditFileTool::new(thread.downgrade(), language_registry)); + + // Test files in different worktrees + let test_cases = vec![ + ("frontend/src/main.js", false, "File in first worktree"), + ("backend/src/main.rs", false, "File in second worktree"), + ( + "shared/.zed/settings.json", + true, + ".zed file in third worktree", + ), + ("/etc/hosts", true, "Absolute path outside all worktrees"), + ( + "../outside/file.txt", + true, + "Relative path outside worktrees", + ), + ]; + + for (path, should_confirm, description) in test_cases { + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let auth = cx.update(|cx| { + tool.authorize( + &EditFileToolInput { + display_description: "Edit file".into(), + path: path.into(), + mode: EditFileMode::Edit, + }, + &stream_tx, + cx, + ) + }); + + if should_confirm { + stream_rx.expect_authorization().await; + } else { + auth.await.unwrap(); + assert!( + stream_rx.try_next().is_err(), + "Failed for case: {} - path: {} - expected no confirmation but got one", + description, + path + ); + } + } + } + + #[gpui::test] + async fn test_needs_confirmation_edge_cases(cx: &mut TestAppContext) { + init_test(cx); + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree( + "/project", + json!({ + ".zed": { + "settings.json": "{}" + }, + "src": { + ".zed": { + "local.json": "{}" + } + } + }), + ) + .await; + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let model = Arc::new(FakeLanguageModel::default()); + let thread = cx.new(|cx| { + Thread::new( + project.clone(), + cx.new(|_cx| ProjectContext::default()), + context_server_registry.clone(), + action_log.clone(), + Templates::new(), + Some(model.clone()), + cx, + ) + }); + let tool = Arc::new(EditFileTool::new(thread.downgrade(), language_registry)); + + // Test edge cases + let test_cases = vec![ + // Empty path - find_project_path returns Some for empty paths + ("", false, "Empty path is treated as project root"), + // Root directory + ("/", true, "Root directory should be outside project"), + // Parent directory references - find_project_path resolves these + ( + "project/../other", + false, + "Path with .. is resolved by find_project_path", + ), + ( + "project/./src/file.rs", + false, + "Path with . 
should work normally", + ), + // Windows-style paths (if on Windows) + #[cfg(target_os = "windows")] + ("C:\\Windows\\System32\\hosts", true, "Windows system path"), + #[cfg(target_os = "windows")] + ("project\\src\\main.rs", false, "Windows-style project path"), + ]; + + for (path, should_confirm, description) in test_cases { + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let auth = cx.update(|cx| { + tool.authorize( + &EditFileToolInput { + display_description: "Edit file".into(), + path: path.into(), + mode: EditFileMode::Edit, + }, + &stream_tx, + cx, + ) + }); + + if should_confirm { + stream_rx.expect_authorization().await; + } else { + auth.await.unwrap(); + assert!( + stream_rx.try_next().is_err(), + "Failed for case: {} - path: {} - expected no confirmation but got one", + description, + path + ); + } + } + } + + #[gpui::test] + async fn test_needs_confirmation_with_different_modes(cx: &mut TestAppContext) { + init_test(cx); + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree( + "/project", + json!({ + "existing.txt": "content", + ".zed": { + "settings.json": "{}" + } + }), + ) + .await; + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let model = Arc::new(FakeLanguageModel::default()); + let thread = cx.new(|cx| { + Thread::new( + project.clone(), + cx.new(|_cx| ProjectContext::default()), + context_server_registry.clone(), + action_log.clone(), + Templates::new(), + Some(model.clone()), + cx, + ) + }); + let tool = Arc::new(EditFileTool::new(thread.downgrade(), language_registry)); + + // Test different EditFileMode values + let modes = vec![ + EditFileMode::Edit, + EditFileMode::Create, + EditFileMode::Overwrite, + ]; + + for mode in modes { + // Test .zed path with different modes + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let _auth = cx.update(|cx| { + tool.authorize( + &EditFileToolInput { + display_description: "Edit settings".into(), + path: "project/.zed/settings.json".into(), + mode: mode.clone(), + }, + &stream_tx, + cx, + ) + }); + + stream_rx.expect_authorization().await; + + // Test outside path with different modes + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let _auth = cx.update(|cx| { + tool.authorize( + &EditFileToolInput { + display_description: "Edit file".into(), + path: "/outside/file.txt".into(), + mode: mode.clone(), + }, + &stream_tx, + cx, + ) + }); + + stream_rx.expect_authorization().await; + + // Test normal path with different modes + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + cx.update(|cx| { + tool.authorize( + &EditFileToolInput { + display_description: "Edit file".into(), + path: "project/normal.txt".into(), + mode: mode.clone(), + }, + &stream_tx, + cx, + ) + }) + .await + .unwrap(); + assert!(stream_rx.try_next().is_err()); + } + } + + #[gpui::test] + async fn test_initial_title_with_partial_input(cx: &mut TestAppContext) { + init_test(cx); + let fs = project::FakeFs::new(cx.executor()); + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let 
context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let model = Arc::new(FakeLanguageModel::default()); + let thread = cx.new(|cx| { + Thread::new( + project.clone(), + cx.new(|_cx| ProjectContext::default()), + context_server_registry, + action_log.clone(), + Templates::new(), + Some(model.clone()), + cx, + ) + }); + let tool = Arc::new(EditFileTool::new(thread.downgrade(), language_registry)); + + assert_eq!( + tool.initial_title(Err(json!({ + "path": "src/main.rs", + "display_description": "", + "old_string": "old code", + "new_string": "new code" + }))), + "src/main.rs" + ); + assert_eq!( + tool.initial_title(Err(json!({ + "path": "", + "display_description": "Fix error handling", + "old_string": "old code", + "new_string": "new code" + }))), + "Fix error handling" + ); + assert_eq!( + tool.initial_title(Err(json!({ + "path": "src/main.rs", + "display_description": "Fix error handling", + "old_string": "old code", + "new_string": "new code" + }))), + "Fix error handling" + ); + assert_eq!( + tool.initial_title(Err(json!({ + "path": "", + "display_description": "", + "old_string": "old code", + "new_string": "new code" + }))), + DEFAULT_UI_TEXT + ); + assert_eq!( + tool.initial_title(Err(serde_json::Value::Null)), + DEFAULT_UI_TEXT + ); + } + + fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + language::init(cx); + TelemetrySettings::register(cx); + agent_settings::AgentSettings::register(cx); + Project::init_settings(cx); + }); + } +} diff --git a/crates/agent2/src/tools/fetch_tool.rs b/crates/agent2/src/tools/fetch_tool.rs new file mode 100644 index 0000000000000000000000000000000000000000..ae26c5fe195da3d73a8ae1da47d072a3bfc3706f --- /dev/null +++ b/crates/agent2/src/tools/fetch_tool.rs @@ -0,0 +1,155 @@ +use std::rc::Rc; +use std::sync::Arc; +use std::{borrow::Cow, cell::RefCell}; + +use agent_client_protocol as acp; +use anyhow::{Context as _, Result, bail}; +use futures::AsyncReadExt as _; +use gpui::{App, AppContext as _, Task}; +use html_to_markdown::{TagHandler, convert_html_to_markdown, markdown}; +use http_client::{AsyncBody, HttpClientWithUrl}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use ui::SharedString; +use util::markdown::MarkdownEscaped; + +use crate::{AgentTool, ToolCallEventStream}; + +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] +enum ContentType { + Html, + Plaintext, + Json, +} + +/// Fetches a URL and returns the content as Markdown. +#[derive(Debug, Serialize, Deserialize, JsonSchema)] +pub struct FetchToolInput { + /// The URL to fetch. 
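+    /// When the URL has no `http://` or `https://` scheme, `https://` is prepended before the request is made.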
+ url: String, +} + +pub struct FetchTool { + http_client: Arc, +} + +impl FetchTool { + pub fn new(http_client: Arc) -> Self { + Self { http_client } + } + + async fn build_message(http_client: Arc, url: &str) -> Result { + let url = if !url.starts_with("https://") && !url.starts_with("http://") { + Cow::Owned(format!("https://{url}")) + } else { + Cow::Borrowed(url) + }; + + let mut response = http_client.get(&url, AsyncBody::default(), true).await?; + + let mut body = Vec::new(); + response + .body_mut() + .read_to_end(&mut body) + .await + .context("error reading response body")?; + + if response.status().is_client_error() { + let text = String::from_utf8_lossy(body.as_slice()); + bail!( + "status error {}, response: {text:?}", + response.status().as_u16() + ); + } + + let Some(content_type) = response.headers().get("content-type") else { + bail!("missing Content-Type header"); + }; + let content_type = content_type + .to_str() + .context("invalid Content-Type header")?; + + let content_type = if content_type.starts_with("text/plain") { + ContentType::Plaintext + } else if content_type.starts_with("application/json") { + ContentType::Json + } else { + ContentType::Html + }; + + match content_type { + ContentType::Html => { + let mut handlers: Vec = vec![ + Rc::new(RefCell::new(markdown::WebpageChromeRemover)), + Rc::new(RefCell::new(markdown::ParagraphHandler)), + Rc::new(RefCell::new(markdown::HeadingHandler)), + Rc::new(RefCell::new(markdown::ListHandler)), + Rc::new(RefCell::new(markdown::TableHandler::new())), + Rc::new(RefCell::new(markdown::StyledTextHandler)), + ]; + if url.contains("wikipedia.org") { + use html_to_markdown::structure::wikipedia; + + handlers.push(Rc::new(RefCell::new(wikipedia::WikipediaChromeRemover))); + handlers.push(Rc::new(RefCell::new(wikipedia::WikipediaInfoboxHandler))); + handlers.push(Rc::new( + RefCell::new(wikipedia::WikipediaCodeHandler::new()), + )); + } else { + handlers.push(Rc::new(RefCell::new(markdown::CodeHandler))); + } + + convert_html_to_markdown(&body[..], &mut handlers) + } + ContentType::Plaintext => Ok(std::str::from_utf8(&body)?.to_owned()), + ContentType::Json => { + let json: serde_json::Value = serde_json::from_slice(&body)?; + + Ok(format!( + "```json\n{}\n```", + serde_json::to_string_pretty(&json)? 
+ )) + } + } + } +} + +impl AgentTool for FetchTool { + type Input = FetchToolInput; + type Output = String; + + fn name(&self) -> SharedString { + "fetch".into() + } + + fn kind(&self) -> acp::ToolKind { + acp::ToolKind::Fetch + } + + fn initial_title(&self, input: Result) -> SharedString { + match input { + Ok(input) => format!("Fetch {}", MarkdownEscaped(&input.url)).into(), + Err(_) => "Fetch URL".into(), + } + } + + fn run( + self: Arc, + input: Self::Input, + _event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + let text = cx.background_spawn({ + let http_client = self.http_client.clone(); + async move { Self::build_message(http_client, &input.url).await } + }); + + cx.foreground_executor().spawn(async move { + let text = text.await?; + if text.trim().is_empty() { + bail!("no textual content found"); + } + Ok(text) + }) + } +} diff --git a/crates/agent2/src/tools/find_path_tool.rs b/crates/agent2/src/tools/find_path_tool.rs index e840fec78c899e764db720fb8cbaa55dbe6fa466..9e11ca6a37d92d6c189e542611fbc396128e6a15 100644 --- a/crates/agent2/src/tools/find_path_tool.rs +++ b/crates/agent2/src/tools/find_path_tool.rs @@ -1,6 +1,8 @@ +use crate::{AgentTool, ToolCallEventStream}; use agent_client_protocol as acp; -use anyhow::{anyhow, Result}; +use anyhow::{Result, anyhow}; use gpui::{App, AppContext, Entity, SharedString, Task}; +use language_model::LanguageModelToolResultContent; use project::Project; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; @@ -8,8 +10,6 @@ use std::fmt::Write; use std::{cmp, path::PathBuf, sync::Arc}; use util::paths::PathMatcher; -use crate::{AgentTool, ToolCallEventStream}; - /// Fast file path pattern matching tool that works with any codebase size /// /// - Supports glob patterns like "**/*.js" or "src/**/*.ts" @@ -31,7 +31,6 @@ pub struct FindPathToolInput { /// You can get back the first two paths by providing a glob of "*thing*.txt" /// pub glob: String, - /// Optional starting position for paginated results (0-based). /// When not provided, starts from the beginning. 
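+    /// Each page contains at most RESULTS_PER_PAGE (50) paths; pass the next offset to continue from where the previous page ended.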
#[serde(default)] @@ -39,8 +38,35 @@ pub struct FindPathToolInput { } #[derive(Debug, Serialize, Deserialize)] -struct FindPathToolOutput { - paths: Vec, +pub struct FindPathToolOutput { + offset: usize, + current_matches_page: Vec, + all_matches_len: usize, +} + +impl From for LanguageModelToolResultContent { + fn from(output: FindPathToolOutput) -> Self { + if output.current_matches_page.is_empty() { + "No matches found".into() + } else { + let mut llm_output = format!("Found {} total matches.", output.all_matches_len); + if output.all_matches_len > RESULTS_PER_PAGE { + write!( + &mut llm_output, + "\nShowing results {}-{} (provide 'offset' parameter for more results):", + output.offset + 1, + output.offset + output.current_matches_page.len() + ) + .unwrap(); + } + + for mat in output.current_matches_page { + write!(&mut llm_output, "\n{}", mat.display()).unwrap(); + } + + llm_output.into() + } + } } const RESULTS_PER_PAGE: usize = 50; @@ -57,6 +83,7 @@ impl FindPathTool { impl AgentTool for FindPathTool { type Input = FindPathToolInput; + type Output = FindPathToolOutput; fn name(&self) -> SharedString { "find_path".into() @@ -66,8 +93,12 @@ impl AgentTool for FindPathTool { acp::ToolKind::Search } - fn initial_title(&self, input: Self::Input) -> SharedString { - format!("Find paths matching “`{}`”", input.glob).into() + fn initial_title(&self, input: Result) -> SharedString { + let mut title = "Find paths".to_string(); + if let Ok(input) = input { + title.push_str(&format!(" matching “`{}`”", input.glob)); + } + title.into() } fn run( @@ -75,7 +106,7 @@ impl AgentTool for FindPathTool { input: Self::Input, event_stream: ToolCallEventStream, cx: &mut App, - ) -> Task> { + ) -> Task> { let search_paths_task = search_paths(&input.glob, self.project.clone(), cx); cx.background_spawn(async move { @@ -83,8 +114,8 @@ impl AgentTool for FindPathTool { let paginated_matches: &[PathBuf] = &matches[cmp::min(input.offset, matches.len()) ..cmp::min(input.offset + RESULTS_PER_PAGE, matches.len())]; - event_stream.send_update(acp::ToolCallUpdateFields { - title: Some(if paginated_matches.len() == 0 { + event_stream.update_fields(acp::ToolCallUpdateFields { + title: Some(if paginated_matches.is_empty() { "No matches".into() } else if paginated_matches.len() == 1 { "1 match".into() @@ -107,32 +138,14 @@ impl AgentTool for FindPathTool { }) .collect(), ), - raw_output: Some(serde_json::json!({ - "paths": &matches, - })), ..Default::default() }); - if matches.is_empty() { - Ok("No matches found".into()) - } else { - let mut message = format!("Found {} total matches.", matches.len()); - if matches.len() > RESULTS_PER_PAGE { - write!( - &mut message, - "\nShowing results {}-{} (provide 'offset' parameter for more results):", - input.offset + 1, - input.offset + paginated_matches.len() - ) - .unwrap(); - } - - for mat in matches.iter().skip(input.offset).take(RESULTS_PER_PAGE) { - write!(&mut message, "\n{}", mat.display()).unwrap(); - } - - Ok(message) - } + Ok(FindPathToolOutput { + offset: input.offset, + current_matches_page: paginated_matches.to_vec(), + all_matches_len: matches.len(), + }) }) } } diff --git a/crates/agent2/src/tools/grep_tool.rs b/crates/agent2/src/tools/grep_tool.rs new file mode 100644 index 0000000000000000000000000000000000000000..955dae723558c3b8b3324109c18e9448215d66a3 --- /dev/null +++ b/crates/agent2/src/tools/grep_tool.rs @@ -0,0 +1,1182 @@ +use crate::{AgentTool, ToolCallEventStream}; +use agent_client_protocol as acp; +use anyhow::{Result, anyhow}; +use futures::StreamExt; 
+use gpui::{App, Entity, SharedString, Task}; +use language::{OffsetRangeExt, ParseStatus, Point}; +use project::{ + Project, WorktreeSettings, + search::{SearchQuery, SearchResult}, +}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use settings::Settings; +use std::{cmp, fmt::Write, sync::Arc}; +use util::RangeExt; +use util::markdown::MarkdownInlineCode; +use util::paths::PathMatcher; + +/// Searches the contents of files in the project with a regular expression +/// +/// - Prefer this tool to path search when searching for symbols in the project, because you won't need to guess what path it's in. +/// - Supports full regex syntax (eg. "log.*Error", "function\\s+\\w+", etc.) +/// - Pass an `include_pattern` if you know how to narrow your search on the files system +/// - Never use this tool to search for paths. Only search file contents with this tool. +/// - Use this tool when you need to find files containing specific patterns +/// - Results are paginated with 20 matches per page. Use the optional 'offset' parameter to request subsequent pages. +/// - DO NOT use HTML entities solely to escape characters in the tool parameters. +#[derive(Debug, Serialize, Deserialize, JsonSchema)] +pub struct GrepToolInput { + /// A regex pattern to search for in the entire project. Note that the regex will be parsed by the Rust `regex` crate. + /// + /// Do NOT specify a path here! This will only be matched against the code **content**. + pub regex: String, + /// A glob pattern for the paths of files to include in the search. + /// Supports standard glob patterns like "**/*.rs" or "src/**/*.ts". + /// If omitted, all files in the project will be searched. + pub include_pattern: Option, + /// Optional starting position for paginated results (0-based). + /// When not provided, starts from the beginning. + #[serde(default)] + pub offset: u32, + /// Whether the regex is case-sensitive. Defaults to false (case-insensitive). + #[serde(default)] + pub case_sensitive: bool, +} + +impl GrepToolInput { + /// Which page of search results this is. 
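+    /// Pages are 1-based and hold RESULTS_PER_PAGE (20) matches each.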
+ pub fn page(&self) -> u32 { + 1 + (self.offset / RESULTS_PER_PAGE) + } +} + +const RESULTS_PER_PAGE: u32 = 20; + +pub struct GrepTool { + project: Entity, +} + +impl GrepTool { + pub fn new(project: Entity) -> Self { + Self { project } + } +} + +impl AgentTool for GrepTool { + type Input = GrepToolInput; + type Output = String; + + fn name(&self) -> SharedString { + "grep".into() + } + + fn kind(&self) -> acp::ToolKind { + acp::ToolKind::Search + } + + fn initial_title(&self, input: Result) -> SharedString { + match input { + Ok(input) => { + let page = input.page(); + let regex_str = MarkdownInlineCode(&input.regex); + let case_info = if input.case_sensitive { + " (case-sensitive)" + } else { + "" + }; + + if page > 1 { + format!("Get page {page} of search results for regex {regex_str}{case_info}") + } else { + format!("Search files for regex {regex_str}{case_info}") + } + } + Err(_) => "Search with regex".into(), + } + .into() + } + + fn run( + self: Arc, + input: Self::Input, + _event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + const CONTEXT_LINES: u32 = 2; + const MAX_ANCESTOR_LINES: u32 = 10; + + let include_matcher = match PathMatcher::new( + input + .include_pattern + .as_ref() + .into_iter() + .collect::>(), + ) { + Ok(matcher) => matcher, + Err(error) => { + return Task::ready(Err(anyhow!("invalid include glob pattern: {error}"))); + } + }; + + // Exclude global file_scan_exclusions and private_files settings + let exclude_matcher = { + let global_settings = WorktreeSettings::get_global(cx); + let exclude_patterns = global_settings + .file_scan_exclusions + .sources() + .iter() + .chain(global_settings.private_files.sources().iter()); + + match PathMatcher::new(exclude_patterns) { + Ok(matcher) => matcher, + Err(error) => { + return Task::ready(Err(anyhow!("invalid exclude pattern: {error}"))); + } + } + }; + + let query = match SearchQuery::regex( + &input.regex, + false, + input.case_sensitive, + false, + false, + include_matcher, + exclude_matcher, + true, // Always match file include pattern against *full project paths* that start with a project root. 
+ None, + ) { + Ok(query) => query, + Err(error) => return Task::ready(Err(error)), + }; + + let results = self + .project + .update(cx, |project, cx| project.search(query, cx)); + + let project = self.project.downgrade(); + cx.spawn(async move |cx| { + futures::pin_mut!(results); + + let mut output = String::new(); + let mut skips_remaining = input.offset; + let mut matches_found = 0; + let mut has_more_matches = false; + + 'outer: while let Some(SearchResult::Buffer { buffer, ranges }) = results.next().await { + if ranges.is_empty() { + continue; + } + + let Ok((Some(path), mut parse_status)) = buffer.read_with(cx, |buffer, cx| { + (buffer.file().map(|file| file.full_path(cx)), buffer.parse_status()) + }) else { + continue; + }; + + // Check if this file should be excluded based on its worktree settings + if let Ok(Some(project_path)) = project.read_with(cx, |project, cx| { + project.find_project_path(&path, cx) + }) + && cx.update(|cx| { + let worktree_settings = WorktreeSettings::get(Some((&project_path).into()), cx); + worktree_settings.is_path_excluded(&project_path.path) + || worktree_settings.is_path_private(&project_path.path) + }).unwrap_or(false) { + continue; + } + + while *parse_status.borrow() != ParseStatus::Idle { + parse_status.changed().await?; + } + + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?; + + let mut ranges = ranges + .into_iter() + .map(|range| { + let matched = range.to_point(&snapshot); + let matched_end_line_len = snapshot.line_len(matched.end.row); + let full_lines = Point::new(matched.start.row, 0)..Point::new(matched.end.row, matched_end_line_len); + let symbols = snapshot.symbols_containing(matched.start, None); + + if let Some(ancestor_node) = snapshot.syntax_ancestor(full_lines.clone()) { + let full_ancestor_range = ancestor_node.byte_range().to_point(&snapshot); + let end_row = full_ancestor_range.end.row.min(full_ancestor_range.start.row + MAX_ANCESTOR_LINES); + let end_col = snapshot.line_len(end_row); + let capped_ancestor_range = Point::new(full_ancestor_range.start.row, 0)..Point::new(end_row, end_col); + + if capped_ancestor_range.contains_inclusive(&full_lines) { + return (capped_ancestor_range, Some(full_ancestor_range), symbols) + } + } + + let mut matched = matched; + matched.start.column = 0; + matched.start.row = + matched.start.row.saturating_sub(CONTEXT_LINES); + matched.end.row = cmp::min( + snapshot.max_point().row, + matched.end.row + CONTEXT_LINES, + ); + matched.end.column = snapshot.line_len(matched.end.row); + + (matched, None, symbols) + }) + .peekable(); + + let mut file_header_written = false; + + while let Some((mut range, ancestor_range, parent_symbols)) = ranges.next(){ + if skips_remaining > 0 { + skips_remaining -= 1; + continue; + } + + // We'd already found a full page of matches, and we just found one more. 
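+                    // Record that more matches exist so the summary can point the model at the next offset, then stop searching early.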
+ if matches_found >= RESULTS_PER_PAGE { + has_more_matches = true; + break 'outer; + } + + while let Some((next_range, _, _)) = ranges.peek() { + if range.end.row >= next_range.start.row { + range.end = next_range.end; + ranges.next(); + } else { + break; + } + } + + if !file_header_written { + writeln!(output, "\n## Matches in {}", path.display())?; + file_header_written = true; + } + + let end_row = range.end.row; + output.push_str("\n### "); + + if let Some(parent_symbols) = &parent_symbols { + for symbol in parent_symbols { + write!(output, "{} › ", symbol.text)?; + } + } + + if range.start.row == end_row { + writeln!(output, "L{}", range.start.row + 1)?; + } else { + writeln!(output, "L{}-{}", range.start.row + 1, end_row + 1)?; + } + + output.push_str("```\n"); + output.extend(snapshot.text_for_range(range)); + output.push_str("\n```\n"); + + if let Some(ancestor_range) = ancestor_range + && end_row < ancestor_range.end.row { + let remaining_lines = ancestor_range.end.row - end_row; + writeln!(output, "\n{} lines remaining in ancestor node. Read the file to see all.", remaining_lines)?; + } + + matches_found += 1; + } + } + + if matches_found == 0 { + Ok("No matches found".into()) + } else if has_more_matches { + Ok(format!( + "Showing matches {}-{} (there were more matches found; use offset: {} to see next page):\n{output}", + input.offset + 1, + input.offset + matches_found, + input.offset + RESULTS_PER_PAGE, + )) + } else { + Ok(format!("Found {matches_found} matches:\n{output}")) + } + }) + } +} + +#[cfg(test)] +mod tests { + use crate::ToolCallEventStream; + + use super::*; + use gpui::{TestAppContext, UpdateGlobal}; + use language::{Language, LanguageConfig, LanguageMatcher}; + use project::{FakeFs, Project, WorktreeSettings}; + use serde_json::json; + use settings::SettingsStore; + use unindent::Unindent; + use util::path; + + #[gpui::test] + async fn test_grep_tool_with_include_pattern(cx: &mut TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/root"), + serde_json::json!({ + "src": { + "main.rs": "fn main() {\n println!(\"Hello, world!\");\n}", + "utils": { + "helper.rs": "fn helper() {\n println!(\"I'm a helper!\");\n}", + }, + }, + "tests": { + "test_main.rs": "fn test_main() {\n assert!(true);\n}", + } + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + + // Test with include pattern for Rust files inside the root of the project + let input = GrepToolInput { + regex: "println".to_string(), + include_pattern: Some("root/**/*.rs".to_string()), + offset: 0, + case_sensitive: false, + }; + + let result = run_grep_tool(input, project.clone(), cx).await; + assert!(result.contains("main.rs"), "Should find matches in main.rs"); + assert!( + result.contains("helper.rs"), + "Should find matches in helper.rs" + ); + assert!( + !result.contains("test_main.rs"), + "Should not include test_main.rs even though it's a .rs file (because it doesn't have the pattern)" + ); + + // Test with include pattern for src directory only + let input = GrepToolInput { + regex: "fn".to_string(), + include_pattern: Some("root/**/src/**".to_string()), + offset: 0, + case_sensitive: false, + }; + + let result = run_grep_tool(input, project.clone(), cx).await; + assert!( + result.contains("main.rs"), + "Should find matches in src/main.rs" + ); + assert!( + result.contains("helper.rs"), + "Should find matches in src/utils/helper.rs" + ); + assert!( + 
!result.contains("test_main.rs"), + "Should not include test_main.rs as it's not in src directory" + ); + + // Test with empty include pattern (should default to all files) + let input = GrepToolInput { + regex: "fn".to_string(), + include_pattern: None, + offset: 0, + case_sensitive: false, + }; + + let result = run_grep_tool(input, project.clone(), cx).await; + assert!(result.contains("main.rs"), "Should find matches in main.rs"); + assert!( + result.contains("helper.rs"), + "Should find matches in helper.rs" + ); + assert!( + result.contains("test_main.rs"), + "Should include test_main.rs" + ); + } + + #[gpui::test] + async fn test_grep_tool_with_case_sensitivity(cx: &mut TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/root"), + serde_json::json!({ + "case_test.txt": "This file has UPPERCASE and lowercase text.\nUPPERCASE patterns should match only with case_sensitive: true", + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + + // Test case-insensitive search (default) + let input = GrepToolInput { + regex: "uppercase".to_string(), + include_pattern: Some("**/*.txt".to_string()), + offset: 0, + case_sensitive: false, + }; + + let result = run_grep_tool(input, project.clone(), cx).await; + assert!( + result.contains("UPPERCASE"), + "Case-insensitive search should match uppercase" + ); + + // Test case-sensitive search + let input = GrepToolInput { + regex: "uppercase".to_string(), + include_pattern: Some("**/*.txt".to_string()), + offset: 0, + case_sensitive: true, + }; + + let result = run_grep_tool(input, project.clone(), cx).await; + assert!( + !result.contains("UPPERCASE"), + "Case-sensitive search should not match uppercase" + ); + + // Test case-sensitive search + let input = GrepToolInput { + regex: "LOWERCASE".to_string(), + include_pattern: Some("**/*.txt".to_string()), + offset: 0, + case_sensitive: true, + }; + + let result = run_grep_tool(input, project.clone(), cx).await; + + assert!( + !result.contains("lowercase"), + "Case-sensitive search should match lowercase" + ); + + // Test case-sensitive search for lowercase pattern + let input = GrepToolInput { + regex: "lowercase".to_string(), + include_pattern: Some("**/*.txt".to_string()), + offset: 0, + case_sensitive: true, + }; + + let result = run_grep_tool(input, project.clone(), cx).await; + assert!( + result.contains("lowercase"), + "Case-sensitive search should match lowercase text" + ); + } + + /// Helper function to set up a syntax test environment + async fn setup_syntax_test(cx: &mut TestAppContext) -> Entity { + use unindent::Unindent; + init_test(cx); + cx.executor().allow_parking(); + + let fs = FakeFs::new(cx.executor()); + + // Create test file with syntax structures + fs.insert_tree( + path!("/root"), + serde_json::json!({ + "test_syntax.rs": r#" + fn top_level_function() { + println!("This is at the top level"); + } + + mod feature_module { + pub mod nested_module { + pub fn nested_function( + first_arg: String, + second_arg: i32, + ) { + println!("Function in nested module"); + println!("{first_arg}"); + println!("{second_arg}"); + } + } + } + + struct MyStruct { + field1: String, + field2: i32, + } + + impl MyStruct { + fn method_with_block() { + let condition = true; + if condition { + println!("Inside if block"); + } + } + + fn long_function() { + println!("Line 1"); + println!("Line 2"); + println!("Line 3"); + println!("Line 4"); + println!("Line 5"); + 
println!("Line 6"); + println!("Line 7"); + println!("Line 8"); + println!("Line 9"); + println!("Line 10"); + println!("Line 11"); + println!("Line 12"); + } + } + + trait Processor { + fn process(&self, input: &str) -> String; + } + + impl Processor for MyStruct { + fn process(&self, input: &str) -> String { + format!("Processed: {}", input) + } + } + "#.unindent().trim(), + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + + project.update(cx, |project, _cx| { + project.languages().add(rust_lang().into()) + }); + + project + } + + #[gpui::test] + async fn test_grep_top_level_function(cx: &mut TestAppContext) { + let project = setup_syntax_test(cx).await; + + // Test: Line at the top level of the file + let input = GrepToolInput { + regex: "This is at the top level".to_string(), + include_pattern: Some("**/*.rs".to_string()), + offset: 0, + case_sensitive: false, + }; + + let result = run_grep_tool(input, project.clone(), cx).await; + let expected = r#" + Found 1 matches: + + ## Matches in root/test_syntax.rs + + ### fn top_level_function › L1-3 + ``` + fn top_level_function() { + println!("This is at the top level"); + } + ``` + "# + .unindent(); + assert_eq!(result, expected); + } + + #[gpui::test] + async fn test_grep_function_body(cx: &mut TestAppContext) { + let project = setup_syntax_test(cx).await; + + // Test: Line inside a function body + let input = GrepToolInput { + regex: "Function in nested module".to_string(), + include_pattern: Some("**/*.rs".to_string()), + offset: 0, + case_sensitive: false, + }; + + let result = run_grep_tool(input, project.clone(), cx).await; + let expected = r#" + Found 1 matches: + + ## Matches in root/test_syntax.rs + + ### mod feature_module › pub mod nested_module › pub fn nested_function › L10-14 + ``` + ) { + println!("Function in nested module"); + println!("{first_arg}"); + println!("{second_arg}"); + } + ``` + "# + .unindent(); + assert_eq!(result, expected); + } + + #[gpui::test] + async fn test_grep_function_args_and_body(cx: &mut TestAppContext) { + let project = setup_syntax_test(cx).await; + + // Test: Line with a function argument + let input = GrepToolInput { + regex: "second_arg".to_string(), + include_pattern: Some("**/*.rs".to_string()), + offset: 0, + case_sensitive: false, + }; + + let result = run_grep_tool(input, project.clone(), cx).await; + let expected = r#" + Found 1 matches: + + ## Matches in root/test_syntax.rs + + ### mod feature_module › pub mod nested_module › pub fn nested_function › L7-14 + ``` + pub fn nested_function( + first_arg: String, + second_arg: i32, + ) { + println!("Function in nested module"); + println!("{first_arg}"); + println!("{second_arg}"); + } + ``` + "# + .unindent(); + assert_eq!(result, expected); + } + + #[gpui::test] + async fn test_grep_if_block(cx: &mut TestAppContext) { + use unindent::Unindent; + let project = setup_syntax_test(cx).await; + + // Test: Line inside an if block + let input = GrepToolInput { + regex: "Inside if block".to_string(), + include_pattern: Some("**/*.rs".to_string()), + offset: 0, + case_sensitive: false, + }; + + let result = run_grep_tool(input, project.clone(), cx).await; + let expected = r#" + Found 1 matches: + + ## Matches in root/test_syntax.rs + + ### impl MyStruct › fn method_with_block › L26-28 + ``` + if condition { + println!("Inside if block"); + } + ``` + "# + .unindent(); + assert_eq!(result, expected); + } + + #[gpui::test] + async fn test_grep_long_function_top(cx: &mut TestAppContext) { + use 
unindent::Unindent; + let project = setup_syntax_test(cx).await; + + // Test: Line in the middle of a long function - should show message about remaining lines + let input = GrepToolInput { + regex: "Line 5".to_string(), + include_pattern: Some("**/*.rs".to_string()), + offset: 0, + case_sensitive: false, + }; + + let result = run_grep_tool(input, project.clone(), cx).await; + let expected = r#" + Found 1 matches: + + ## Matches in root/test_syntax.rs + + ### impl MyStruct › fn long_function › L31-41 + ``` + fn long_function() { + println!("Line 1"); + println!("Line 2"); + println!("Line 3"); + println!("Line 4"); + println!("Line 5"); + println!("Line 6"); + println!("Line 7"); + println!("Line 8"); + println!("Line 9"); + println!("Line 10"); + ``` + + 3 lines remaining in ancestor node. Read the file to see all. + "# + .unindent(); + assert_eq!(result, expected); + } + + #[gpui::test] + async fn test_grep_long_function_bottom(cx: &mut TestAppContext) { + use unindent::Unindent; + let project = setup_syntax_test(cx).await; + + // Test: Line in the long function + let input = GrepToolInput { + regex: "Line 12".to_string(), + include_pattern: Some("**/*.rs".to_string()), + offset: 0, + case_sensitive: false, + }; + + let result = run_grep_tool(input, project.clone(), cx).await; + let expected = r#" + Found 1 matches: + + ## Matches in root/test_syntax.rs + + ### impl MyStruct › fn long_function › L41-45 + ``` + println!("Line 10"); + println!("Line 11"); + println!("Line 12"); + } + } + ``` + "# + .unindent(); + assert_eq!(result, expected); + } + + async fn run_grep_tool( + input: GrepToolInput, + project: Entity, + cx: &mut TestAppContext, + ) -> String { + let tool = Arc::new(GrepTool { project }); + let task = cx.update(|cx| tool.run(input, ToolCallEventStream::test().0, cx)); + + match task.await { + Ok(result) => { + if cfg!(windows) { + result.replace("root\\", "root/") + } else { + result + } + } + Err(e) => panic!("Failed to run grep tool: {}", e), + } + } + + fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + language::init(cx); + Project::init_settings(cx); + }); + } + + fn rust_lang() -> Language { + Language::new( + LanguageConfig { + name: "Rust".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::LANGUAGE.into()), + ) + .with_outline_query(include_str!("../../../languages/src/rust/outline.scm")) + .unwrap() + } + + #[gpui::test] + async fn test_grep_security_boundaries(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + + fs.insert_tree( + path!("/"), + json!({ + "project_root": { + "allowed_file.rs": "fn main() { println!(\"This file is in the project\"); }", + ".mysecrets": "SECRET_KEY=abc123\nfn secret() { /* private */ }", + ".secretdir": { + "config": "fn special_configuration() { /* excluded */ }" + }, + ".mymetadata": "fn custom_metadata() { /* excluded */ }", + "subdir": { + "normal_file.rs": "fn normal_file_content() { /* Normal */ }", + "special.privatekey": "fn private_key_content() { /* private */ }", + "data.mysensitive": "fn sensitive_data() { /* private */ }" + } + }, + "outside_project": { + "sensitive_file.rs": "fn outside_function() { /* This file is outside the project */ }" + } + }), + ) + .await; + + cx.update(|cx| { + use gpui::UpdateGlobal; + use project::WorktreeSettings; + use settings::SettingsStore; + 
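+            // Mark the fixture's excluded directories and private files in the global settings before the project is created.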
SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::(cx, |settings| { + settings.file_scan_exclusions = Some(vec![ + "**/.secretdir".to_string(), + "**/.mymetadata".to_string(), + ]); + settings.private_files = Some(vec![ + "**/.mysecrets".to_string(), + "**/*.privatekey".to_string(), + "**/*.mysensitive".to_string(), + ]); + }); + }); + }); + + let project = Project::test(fs.clone(), [path!("/project_root").as_ref()], cx).await; + + // Searching for files outside the project worktree should return no results + let result = run_grep_tool( + GrepToolInput { + regex: "outside_function".to_string(), + include_pattern: None, + offset: 0, + case_sensitive: false, + }, + project.clone(), + cx, + ) + .await; + let paths = extract_paths_from_results(&result); + assert!( + paths.is_empty(), + "grep_tool should not find files outside the project worktree" + ); + + // Searching within the project should succeed + let result = run_grep_tool( + GrepToolInput { + regex: "main".to_string(), + include_pattern: None, + offset: 0, + case_sensitive: false, + }, + project.clone(), + cx, + ) + .await; + let paths = extract_paths_from_results(&result); + assert!( + paths.iter().any(|p| p.contains("allowed_file.rs")), + "grep_tool should be able to search files inside worktrees" + ); + + // Searching files that match file_scan_exclusions should return no results + let result = run_grep_tool( + GrepToolInput { + regex: "special_configuration".to_string(), + include_pattern: None, + offset: 0, + case_sensitive: false, + }, + project.clone(), + cx, + ) + .await; + let paths = extract_paths_from_results(&result); + assert!( + paths.is_empty(), + "grep_tool should not search files in .secretdir (file_scan_exclusions)" + ); + + let result = run_grep_tool( + GrepToolInput { + regex: "custom_metadata".to_string(), + include_pattern: None, + offset: 0, + case_sensitive: false, + }, + project.clone(), + cx, + ) + .await; + let paths = extract_paths_from_results(&result); + assert!( + paths.is_empty(), + "grep_tool should not search .mymetadata files (file_scan_exclusions)" + ); + + // Searching private files should return no results + let result = run_grep_tool( + GrepToolInput { + regex: "SECRET_KEY".to_string(), + include_pattern: None, + offset: 0, + case_sensitive: false, + }, + project.clone(), + cx, + ) + .await; + let paths = extract_paths_from_results(&result); + assert!( + paths.is_empty(), + "grep_tool should not search .mysecrets (private_files)" + ); + + let result = run_grep_tool( + GrepToolInput { + regex: "private_key_content".to_string(), + include_pattern: None, + offset: 0, + case_sensitive: false, + }, + project.clone(), + cx, + ) + .await; + let paths = extract_paths_from_results(&result); + + assert!( + paths.is_empty(), + "grep_tool should not search .privatekey files (private_files)" + ); + + let result = run_grep_tool( + GrepToolInput { + regex: "sensitive_data".to_string(), + include_pattern: None, + offset: 0, + case_sensitive: false, + }, + project.clone(), + cx, + ) + .await; + let paths = extract_paths_from_results(&result); + assert!( + paths.is_empty(), + "grep_tool should not search .mysensitive files (private_files)" + ); + + // Searching a normal file should still work, even with private_files configured + let result = run_grep_tool( + GrepToolInput { + regex: "normal_file_content".to_string(), + include_pattern: None, + offset: 0, + case_sensitive: false, + }, + project.clone(), + cx, + ) + .await; + let paths = extract_paths_from_results(&result); + assert!( + 
paths.iter().any(|p| p.contains("normal_file.rs")), + "Should be able to search normal files" + ); + + // Path traversal attempts with .. in include_pattern should not escape project + let result = run_grep_tool( + GrepToolInput { + regex: "outside_function".to_string(), + include_pattern: Some("../outside_project/**/*.rs".to_string()), + offset: 0, + case_sensitive: false, + }, + project.clone(), + cx, + ) + .await; + let paths = extract_paths_from_results(&result); + assert!( + paths.is_empty(), + "grep_tool should not allow escaping project boundaries with relative paths" + ); + } + + #[gpui::test] + async fn test_grep_with_multiple_worktree_settings(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + + // Create first worktree with its own private files + fs.insert_tree( + path!("/worktree1"), + json!({ + ".zed": { + "settings.json": r#"{ + "file_scan_exclusions": ["**/fixture.*"], + "private_files": ["**/secret.rs"] + }"# + }, + "src": { + "main.rs": "fn main() { let secret_key = \"hidden\"; }", + "secret.rs": "const API_KEY: &str = \"secret_value\";", + "utils.rs": "pub fn get_config() -> String { \"config\".to_string() }" + }, + "tests": { + "test.rs": "fn test_secret() { assert!(true); }", + "fixture.sql": "SELECT * FROM secret_table;" + } + }), + ) + .await; + + // Create second worktree with different private files + fs.insert_tree( + path!("/worktree2"), + json!({ + ".zed": { + "settings.json": r#"{ + "file_scan_exclusions": ["**/internal.*"], + "private_files": ["**/private.js", "**/data.json"] + }"# + }, + "lib": { + "public.js": "export function getSecret() { return 'public'; }", + "private.js": "const SECRET_KEY = \"private_value\";", + "data.json": "{\"secret_data\": \"hidden\"}" + }, + "docs": { + "README.md": "# Documentation with secret info", + "internal.md": "Internal secret documentation" + } + }), + ) + .await; + + // Set global settings + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::(cx, |settings| { + settings.file_scan_exclusions = + Some(vec!["**/.git".to_string(), "**/node_modules".to_string()]); + settings.private_files = Some(vec!["**/.env".to_string()]); + }); + }); + }); + + let project = Project::test( + fs.clone(), + [path!("/worktree1").as_ref(), path!("/worktree2").as_ref()], + cx, + ) + .await; + + // Wait for worktrees to be fully scanned + cx.executor().run_until_parked(); + + // Search for "secret" - should exclude files based on worktree-specific settings + let result = run_grep_tool( + GrepToolInput { + regex: "secret".to_string(), + include_pattern: None, + offset: 0, + case_sensitive: false, + }, + project.clone(), + cx, + ) + .await; + let paths = extract_paths_from_results(&result); + + // Should find matches in non-private files + assert!( + paths.iter().any(|p| p.contains("main.rs")), + "Should find 'secret' in worktree1/src/main.rs" + ); + assert!( + paths.iter().any(|p| p.contains("test.rs")), + "Should find 'secret' in worktree1/tests/test.rs" + ); + assert!( + paths.iter().any(|p| p.contains("public.js")), + "Should find 'secret' in worktree2/lib/public.js" + ); + assert!( + paths.iter().any(|p| p.contains("README.md")), + "Should find 'secret' in worktree2/docs/README.md" + ); + + // Should NOT find matches in private/excluded files based on worktree settings + assert!( + !paths.iter().any(|p| p.contains("secret.rs")), + "Should not search in worktree1/src/secret.rs (local private_files)" + ); + assert!( + !paths.iter().any(|p| 
p.contains("fixture.sql")), + "Should not search in worktree1/tests/fixture.sql (local file_scan_exclusions)" + ); + assert!( + !paths.iter().any(|p| p.contains("private.js")), + "Should not search in worktree2/lib/private.js (local private_files)" + ); + assert!( + !paths.iter().any(|p| p.contains("data.json")), + "Should not search in worktree2/lib/data.json (local private_files)" + ); + assert!( + !paths.iter().any(|p| p.contains("internal.md")), + "Should not search in worktree2/docs/internal.md (local file_scan_exclusions)" + ); + + // Test with `include_pattern` specific to one worktree + let result = run_grep_tool( + GrepToolInput { + regex: "secret".to_string(), + include_pattern: Some("worktree1/**/*.rs".to_string()), + offset: 0, + case_sensitive: false, + }, + project.clone(), + cx, + ) + .await; + + let paths = extract_paths_from_results(&result); + + // Should only find matches in worktree1 *.rs files (excluding private ones) + assert!( + paths.iter().any(|p| p.contains("main.rs")), + "Should find match in worktree1/src/main.rs" + ); + assert!( + paths.iter().any(|p| p.contains("test.rs")), + "Should find match in worktree1/tests/test.rs" + ); + assert!( + !paths.iter().any(|p| p.contains("secret.rs")), + "Should not find match in excluded worktree1/src/secret.rs" + ); + assert!( + paths.iter().all(|p| !p.contains("worktree2")), + "Should not find any matches in worktree2" + ); + } + + // Helper function to extract file paths from grep results + fn extract_paths_from_results(results: &str) -> Vec { + results + .lines() + .filter(|line| line.starts_with("## Matches in ")) + .map(|line| { + line.strip_prefix("## Matches in ") + .unwrap() + .trim() + .to_string() + }) + .collect() + } +} diff --git a/crates/agent2/src/tools/list_directory_tool.rs b/crates/agent2/src/tools/list_directory_tool.rs new file mode 100644 index 0000000000000000000000000000000000000000..31575a92e44aa66558175b078b6c8e6087a67653 --- /dev/null +++ b/crates/agent2/src/tools/list_directory_tool.rs @@ -0,0 +1,662 @@ +use crate::{AgentTool, ToolCallEventStream}; +use agent_client_protocol::ToolKind; +use anyhow::{Result, anyhow}; +use gpui::{App, Entity, SharedString, Task}; +use project::{Project, WorktreeSettings}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use settings::Settings; +use std::fmt::Write; +use std::{path::Path, sync::Arc}; +use util::markdown::MarkdownInlineCode; + +/// Lists files and directories in a given path. Prefer the `grep` or `find_path` tools when searching the codebase. +#[derive(Debug, Serialize, Deserialize, JsonSchema)] +pub struct ListDirectoryToolInput { + /// The fully-qualified path of the directory to list in the project. + /// + /// This path should never be absolute, and the first component of the path should always be a root directory in a project. + /// + /// + /// If the project has the following root directories: + /// + /// - directory1 + /// - directory2 + /// + /// You can list the contents of `directory1` by using the path `directory1`. + /// + /// + /// + /// If the project has the following root directories: + /// + /// - foo + /// - bar + /// + /// If you wanna list contents in the directory `foo/baz`, you should use the path `foo/baz`. 
+    /// </example>
+    pub path: String,
+}
+
+pub struct ListDirectoryTool {
+    project: Entity<Project>,
+}
+
+impl ListDirectoryTool {
+    pub fn new(project: Entity<Project>) -> Self {
+        Self { project }
+    }
+}
+
+impl AgentTool for ListDirectoryTool {
+    type Input = ListDirectoryToolInput;
+    type Output = String;
+
+    fn name(&self) -> SharedString {
+        "list_directory".into()
+    }
+
+    fn kind(&self) -> ToolKind {
+        ToolKind::Read
+    }
+
+    fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
+        if let Ok(input) = input {
+            let path = MarkdownInlineCode(&input.path);
+            format!("List the {path} directory's contents").into()
+        } else {
+            "List directory".into()
+        }
+    }
+
+    fn run(
+        self: Arc<Self>,
+        input: Self::Input,
+        _event_stream: ToolCallEventStream,
+        cx: &mut App,
+    ) -> Task<Result<Self::Output>> {
+        // Sometimes models will return these even though we tell it to give a path and not a glob.
+        // When this happens, just list the root worktree directories.
+        if matches!(input.path.as_str(), "." | "" | "./" | "*") {
+            let output = self
+                .project
+                .read(cx)
+                .worktrees(cx)
+                .filter_map(|worktree| {
+                    worktree.read(cx).root_entry().and_then(|entry| {
+                        if entry.is_dir() {
+                            entry.path.to_str()
+                        } else {
+                            None
+                        }
+                    })
+                })
+                .collect::<Vec<_>>()
+                .join("\n");
+
+            return Task::ready(Ok(output));
+        }
+
+        let Some(project_path) = self.project.read(cx).find_project_path(&input.path, cx) else {
+            return Task::ready(Err(anyhow!("Path {} not found in project", input.path)));
+        };
+        let Some(worktree) = self
+            .project
+            .read(cx)
+            .worktree_for_id(project_path.worktree_id, cx)
+        else {
+            return Task::ready(Err(anyhow!("Worktree not found")));
+        };
+
+        // Check if the directory whose contents we're listing is itself excluded or private
+        let global_settings = WorktreeSettings::get_global(cx);
+        if global_settings.is_path_excluded(&project_path.path) {
+            return Task::ready(Err(anyhow!(
+                "Cannot list directory because its path matches the user's global `file_scan_exclusions` setting: {}",
+                &input.path
+            )));
+        }
+
+        if global_settings.is_path_private(&project_path.path) {
+            return Task::ready(Err(anyhow!(
+                "Cannot list directory because its path matches the user's global `private_files` setting: {}",
+                &input.path
+            )));
+        }
+
+        let worktree_settings = WorktreeSettings::get(Some((&project_path).into()), cx);
+        if worktree_settings.is_path_excluded(&project_path.path) {
+            return Task::ready(Err(anyhow!(
+                "Cannot list directory because its path matches the user's worktree `file_scan_exclusions` setting: {}",
+                &input.path
+            )));
+        }
+
+        if worktree_settings.is_path_private(&project_path.path) {
+            return Task::ready(Err(anyhow!(
+                "Cannot list directory because its path matches the user's worktree `private_files` setting: {}",
+                &input.path
+            )));
+        }
+
+        let worktree_snapshot = worktree.read(cx).snapshot();
+        let worktree_root_name = worktree.read(cx).root_name().to_string();
+
+        let Some(entry) = worktree_snapshot.entry_for_path(&project_path.path) else {
+            return Task::ready(Err(anyhow!("Path not found: {}", input.path)));
+        };
+
+        if !entry.is_dir() {
+            return Task::ready(Err(anyhow!("{} is not a directory.", input.path)));
+        }
+        let worktree_snapshot = worktree.read(cx).snapshot();
+
+        let mut folders = Vec::new();
+        let mut files = Vec::new();
+
+        for entry in worktree_snapshot.child_entries(&project_path.path) {
+            // Skip private and excluded files and directories
+            if global_settings.is_path_private(&entry.path)
+                || global_settings.is_path_excluded(&entry.path)
+            {
+                continue;
+            }
+
+            if self
+                .project
+                .read(cx)
.find_project_path(&entry.path, cx) + .map(|project_path| { + let worktree_settings = WorktreeSettings::get(Some((&project_path).into()), cx); + + worktree_settings.is_path_excluded(&project_path.path) + || worktree_settings.is_path_private(&project_path.path) + }) + .unwrap_or(false) + { + continue; + } + + let full_path = Path::new(&worktree_root_name) + .join(&entry.path) + .display() + .to_string(); + if entry.is_dir() { + folders.push(full_path); + } else { + files.push(full_path); + } + } + + let mut output = String::new(); + + if !folders.is_empty() { + writeln!(output, "# Folders:\n{}", folders.join("\n")).unwrap(); + } + + if !files.is_empty() { + writeln!(output, "\n# Files:\n{}", files.join("\n")).unwrap(); + } + + if output.is_empty() { + writeln!(output, "{} is empty.", input.path).unwrap(); + } + + Task::ready(Ok(output)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use gpui::{TestAppContext, UpdateGlobal}; + use indoc::indoc; + use project::{FakeFs, Project, WorktreeSettings}; + use serde_json::json; + use settings::SettingsStore; + use util::path; + + fn platform_paths(path_str: &str) -> String { + if cfg!(target_os = "windows") { + path_str.replace("/", "\\") + } else { + path_str.to_string() + } + } + + fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + language::init(cx); + Project::init_settings(cx); + }); + } + + #[gpui::test] + async fn test_list_directory_separates_files_and_dirs(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/project"), + json!({ + "src": { + "main.rs": "fn main() {}", + "lib.rs": "pub fn hello() {}", + "models": { + "user.rs": "struct User {}", + "post.rs": "struct Post {}" + }, + "utils": { + "helper.rs": "pub fn help() {}" + } + }, + "tests": { + "integration_test.rs": "#[test] fn test() {}" + }, + "README.md": "# Project", + "Cargo.toml": "[package]" + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + let tool = Arc::new(ListDirectoryTool::new(project)); + + // Test listing root directory + let input = ListDirectoryToolInput { + path: "project".into(), + }; + let output = cx + .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .await + .unwrap(); + assert_eq!( + output, + platform_paths(indoc! {" + # Folders: + project/src + project/tests + + # Files: + project/Cargo.toml + project/README.md + "}) + ); + + // Test listing src directory + let input = ListDirectoryToolInput { + path: "project/src".into(), + }; + let output = cx + .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .await + .unwrap(); + assert_eq!( + output, + platform_paths(indoc! 
{" + # Folders: + project/src/models + project/src/utils + + # Files: + project/src/lib.rs + project/src/main.rs + "}) + ); + + // Test listing directory with only files + let input = ListDirectoryToolInput { + path: "project/tests".into(), + }; + let output = cx + .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .await + .unwrap(); + assert!(!output.contains("# Folders:")); + assert!(output.contains("# Files:")); + assert!(output.contains(&platform_paths("project/tests/integration_test.rs"))); + } + + #[gpui::test] + async fn test_list_directory_empty_directory(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/project"), + json!({ + "empty_dir": {} + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + let tool = Arc::new(ListDirectoryTool::new(project)); + + let input = ListDirectoryToolInput { + path: "project/empty_dir".into(), + }; + let output = cx + .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .await + .unwrap(); + assert_eq!(output, "project/empty_dir is empty.\n"); + } + + #[gpui::test] + async fn test_list_directory_error_cases(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/project"), + json!({ + "file.txt": "content" + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + let tool = Arc::new(ListDirectoryTool::new(project)); + + // Test non-existent path + let input = ListDirectoryToolInput { + path: "project/nonexistent".into(), + }; + let output = cx + .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .await; + assert!(output.unwrap_err().to_string().contains("Path not found")); + + // Test trying to list a file instead of directory + let input = ListDirectoryToolInput { + path: "project/file.txt".into(), + }; + let output = cx + .update(|cx| tool.run(input, ToolCallEventStream::test().0, cx)) + .await; + assert!( + output + .unwrap_err() + .to_string() + .contains("is not a directory") + ); + } + + #[gpui::test] + async fn test_list_directory_security(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/project"), + json!({ + "normal_dir": { + "file1.txt": "content", + "file2.txt": "content" + }, + ".mysecrets": "SECRET_KEY=abc123", + ".secretdir": { + "config": "special configuration", + "secret.txt": "secret content" + }, + ".mymetadata": "custom metadata", + "visible_dir": { + "normal.txt": "normal content", + "special.privatekey": "private key content", + "data.mysensitive": "sensitive data", + ".hidden_subdir": { + "hidden_file.txt": "hidden content" + } + } + }), + ) + .await; + + // Configure settings explicitly + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::(cx, |settings| { + settings.file_scan_exclusions = Some(vec![ + "**/.secretdir".to_string(), + "**/.mymetadata".to_string(), + "**/.hidden_subdir".to_string(), + ]); + settings.private_files = Some(vec![ + "**/.mysecrets".to_string(), + "**/*.privatekey".to_string(), + "**/*.mysensitive".to_string(), + ]); + }); + }); + }); + + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + let tool = Arc::new(ListDirectoryTool::new(project)); + + // Listing root directory should exclude private and excluded files + let input = ListDirectoryToolInput { + path: "project".into(), + 
}; + let output = cx + .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .await + .unwrap(); + + // Should include normal directories + assert!(output.contains("normal_dir"), "Should list normal_dir"); + assert!(output.contains("visible_dir"), "Should list visible_dir"); + + // Should NOT include excluded or private files + assert!( + !output.contains(".secretdir"), + "Should not list .secretdir (file_scan_exclusions)" + ); + assert!( + !output.contains(".mymetadata"), + "Should not list .mymetadata (file_scan_exclusions)" + ); + assert!( + !output.contains(".mysecrets"), + "Should not list .mysecrets (private_files)" + ); + + // Trying to list an excluded directory should fail + let input = ListDirectoryToolInput { + path: "project/.secretdir".into(), + }; + let output = cx + .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .await; + assert!( + output + .unwrap_err() + .to_string() + .contains("file_scan_exclusions"), + "Error should mention file_scan_exclusions" + ); + + // Listing a directory should exclude private files within it + let input = ListDirectoryToolInput { + path: "project/visible_dir".into(), + }; + let output = cx + .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .await + .unwrap(); + + // Should include normal files + assert!(output.contains("normal.txt"), "Should list normal.txt"); + + // Should NOT include private files + assert!( + !output.contains("privatekey"), + "Should not list .privatekey files (private_files)" + ); + assert!( + !output.contains("mysensitive"), + "Should not list .mysensitive files (private_files)" + ); + + // Should NOT include subdirectories that match exclusions + assert!( + !output.contains(".hidden_subdir"), + "Should not list .hidden_subdir (file_scan_exclusions)" + ); + } + + #[gpui::test] + async fn test_list_directory_with_multiple_worktree_settings(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + + // Create first worktree with its own private files + fs.insert_tree( + path!("/worktree1"), + json!({ + ".zed": { + "settings.json": r#"{ + "file_scan_exclusions": ["**/fixture.*"], + "private_files": ["**/secret.rs", "**/config.toml"] + }"# + }, + "src": { + "main.rs": "fn main() { println!(\"Hello from worktree1\"); }", + "secret.rs": "const API_KEY: &str = \"secret_key_1\";", + "config.toml": "[database]\nurl = \"postgres://localhost/db1\"" + }, + "tests": { + "test.rs": "mod tests { fn test_it() {} }", + "fixture.sql": "CREATE TABLE users (id INT, name VARCHAR(255));" + } + }), + ) + .await; + + // Create second worktree with different private files + fs.insert_tree( + path!("/worktree2"), + json!({ + ".zed": { + "settings.json": r#"{ + "file_scan_exclusions": ["**/internal.*"], + "private_files": ["**/private.js", "**/data.json"] + }"# + }, + "lib": { + "public.js": "export function greet() { return 'Hello from worktree2'; }", + "private.js": "const SECRET_TOKEN = \"private_token_2\";", + "data.json": "{\"api_key\": \"json_secret_key\"}" + }, + "docs": { + "README.md": "# Public Documentation", + "internal.md": "# Internal Secrets and Configuration" + } + }), + ) + .await; + + // Set global settings + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::(cx, |settings| { + settings.file_scan_exclusions = + Some(vec!["**/.git".to_string(), "**/node_modules".to_string()]); + settings.private_files = Some(vec!["**/.env".to_string()]); + }); + }); + }); + + let project = 
Project::test( + fs.clone(), + [path!("/worktree1").as_ref(), path!("/worktree2").as_ref()], + cx, + ) + .await; + + // Wait for worktrees to be fully scanned + cx.executor().run_until_parked(); + + let tool = Arc::new(ListDirectoryTool::new(project)); + + // Test listing worktree1/src - should exclude secret.rs and config.toml based on local settings + let input = ListDirectoryToolInput { + path: "worktree1/src".into(), + }; + let output = cx + .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .await + .unwrap(); + assert!(output.contains("main.rs"), "Should list main.rs"); + assert!( + !output.contains("secret.rs"), + "Should not list secret.rs (local private_files)" + ); + assert!( + !output.contains("config.toml"), + "Should not list config.toml (local private_files)" + ); + + // Test listing worktree1/tests - should exclude fixture.sql based on local settings + let input = ListDirectoryToolInput { + path: "worktree1/tests".into(), + }; + let output = cx + .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .await + .unwrap(); + assert!(output.contains("test.rs"), "Should list test.rs"); + assert!( + !output.contains("fixture.sql"), + "Should not list fixture.sql (local file_scan_exclusions)" + ); + + // Test listing worktree2/lib - should exclude private.js and data.json based on local settings + let input = ListDirectoryToolInput { + path: "worktree2/lib".into(), + }; + let output = cx + .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .await + .unwrap(); + assert!(output.contains("public.js"), "Should list public.js"); + assert!( + !output.contains("private.js"), + "Should not list private.js (local private_files)" + ); + assert!( + !output.contains("data.json"), + "Should not list data.json (local private_files)" + ); + + // Test listing worktree2/docs - should exclude internal.md based on local settings + let input = ListDirectoryToolInput { + path: "worktree2/docs".into(), + }; + let output = cx + .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .await + .unwrap(); + assert!(output.contains("README.md"), "Should list README.md"); + assert!( + !output.contains("internal.md"), + "Should not list internal.md (local file_scan_exclusions)" + ); + + // Test trying to list an excluded directory directly + let input = ListDirectoryToolInput { + path: "worktree1/src/secret.rs".into(), + }; + let output = cx + .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .await; + assert!( + output + .unwrap_err() + .to_string() + .contains("Cannot list directory"), + ); + } +} diff --git a/crates/agent2/src/tools/move_path_tool.rs b/crates/agent2/src/tools/move_path_tool.rs new file mode 100644 index 0000000000000000000000000000000000000000..2a173a4404fc344051d238c6ba377e63ec2d9acc --- /dev/null +++ b/crates/agent2/src/tools/move_path_tool.rs @@ -0,0 +1,120 @@ +use crate::{AgentTool, ToolCallEventStream}; +use agent_client_protocol::ToolKind; +use anyhow::{Context as _, Result, anyhow}; +use gpui::{App, AppContext, Entity, SharedString, Task}; +use project::Project; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use std::{path::Path, sync::Arc}; +use util::markdown::MarkdownInlineCode; + +/// Moves or rename a file or directory in the project, and returns confirmation that the move succeeded. +/// +/// If the source and destination directories are the same, but the filename is different, this performs a rename. Otherwise, it performs a move. 
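+///
+/// For example (illustrative paths): moving `dir/a.txt` to `dir/b.txt` only changes the filename,
+/// so it is treated as a rename, while moving `dir/a.txt` to `other/b.txt` changes the parent
+/// directory and is treated as a move.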
+/// +/// This tool should be used when it's desirable to move or rename a file or directory without changing its contents at all. +#[derive(Debug, Serialize, Deserialize, JsonSchema)] +pub struct MovePathToolInput { + /// The source path of the file or directory to move/rename. + /// + /// + /// If the project has the following files: + /// + /// - directory1/a/something.txt + /// - directory2/a/things.txt + /// - directory3/a/other.txt + /// + /// You can move the first file by providing a source_path of "directory1/a/something.txt" + /// + pub source_path: String, + + /// The destination path where the file or directory should be moved/renamed to. + /// If the paths are the same except for the filename, then this will be a rename. + /// + /// + /// To move "directory1/a/something.txt" to "directory2/b/renamed.txt", + /// provide a destination_path of "directory2/b/renamed.txt" + /// + pub destination_path: String, +} + +pub struct MovePathTool { + project: Entity, +} + +impl MovePathTool { + pub fn new(project: Entity) -> Self { + Self { project } + } +} + +impl AgentTool for MovePathTool { + type Input = MovePathToolInput; + type Output = String; + + fn name(&self) -> SharedString { + "move_path".into() + } + + fn kind(&self) -> ToolKind { + ToolKind::Move + } + + fn initial_title(&self, input: Result) -> SharedString { + if let Ok(input) = input { + let src = MarkdownInlineCode(&input.source_path); + let dest = MarkdownInlineCode(&input.destination_path); + let src_path = Path::new(&input.source_path); + let dest_path = Path::new(&input.destination_path); + + match dest_path + .file_name() + .and_then(|os_str| os_str.to_os_string().into_string().ok()) + { + Some(filename) if src_path.parent() == dest_path.parent() => { + let filename = MarkdownInlineCode(&filename); + format!("Rename {src} to {filename}").into() + } + _ => format!("Move {src} to {dest}").into(), + } + } else { + "Move path".into() + } + } + + fn run( + self: Arc, + input: Self::Input, + _event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + let rename_task = self.project.update(cx, |project, cx| { + match project + .find_project_path(&input.source_path, cx) + .and_then(|project_path| project.entry_for_path(&project_path, cx)) + { + Some(entity) => match project.find_project_path(&input.destination_path, cx) { + Some(project_path) => project.rename_entry(entity.id, project_path.path, cx), + None => Task::ready(Err(anyhow!( + "Destination path {} was outside the project.", + input.destination_path + ))), + }, + None => Task::ready(Err(anyhow!( + "Source path {} was not found in the project.", + input.source_path + ))), + } + }); + + cx.background_spawn(async move { + let _ = rename_task.await.with_context(|| { + format!("Moving {} to {}", input.source_path, input.destination_path) + })?; + Ok(format!( + "Moved {} to {}", + input.source_path, input.destination_path + )) + }) + } +} diff --git a/crates/agent2/src/tools/now_tool.rs b/crates/agent2/src/tools/now_tool.rs new file mode 100644 index 0000000000000000000000000000000000000000..a72ede26fea1ee42eddb08e2b22b2b2b89c77075 --- /dev/null +++ b/crates/agent2/src/tools/now_tool.rs @@ -0,0 +1,59 @@ +use std::sync::Arc; + +use agent_client_protocol as acp; +use anyhow::Result; +use chrono::{Local, Utc}; +use gpui::{App, SharedString, Task}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; + +use crate::{AgentTool, ToolCallEventStream}; + +#[derive(Debug, Serialize, Deserialize, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub enum 
Timezone { + /// Use UTC for the datetime. + Utc, + /// Use local time for the datetime. + Local, +} + +/// Returns the current datetime in RFC 3339 format. +/// Only use this tool when the user specifically asks for it or the current task would benefit from knowing the current datetime. +#[derive(Debug, Serialize, Deserialize, JsonSchema)] +pub struct NowToolInput { + /// The timezone to use for the datetime. + timezone: Timezone, +} + +pub struct NowTool; + +impl AgentTool for NowTool { + type Input = NowToolInput; + type Output = String; + + fn name(&self) -> SharedString { + "now".into() + } + + fn kind(&self) -> acp::ToolKind { + acp::ToolKind::Other + } + + fn initial_title(&self, _input: Result) -> SharedString { + "Get current time".into() + } + + fn run( + self: Arc, + input: Self::Input, + _event_stream: ToolCallEventStream, + _cx: &mut App, + ) -> Task> { + let now = match input.timezone { + Timezone::Utc => Utc::now().to_rfc3339(), + Timezone::Local => Local::now().to_rfc3339(), + }; + Task::ready(Ok(format!("The current datetime is {now}."))) + } +} diff --git a/crates/agent2/src/tools/open_tool.rs b/crates/agent2/src/tools/open_tool.rs new file mode 100644 index 0000000000000000000000000000000000000000..c20369c2d80cadc99f60c8335b5ecdbef763bf5f --- /dev/null +++ b/crates/agent2/src/tools/open_tool.rs @@ -0,0 +1,166 @@ +use crate::AgentTool; +use agent_client_protocol::ToolKind; +use anyhow::{Context as _, Result}; +use gpui::{App, AppContext, Entity, SharedString, Task}; +use project::Project; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use std::{path::PathBuf, sync::Arc}; +use util::markdown::MarkdownEscaped; + +/// This tool opens a file or URL with the default application associated with it on the user's operating system: +/// +/// - On macOS, it's equivalent to the `open` command +/// - On Windows, it's equivalent to `start` +/// - On Linux, it uses something like `xdg-open`, `gio open`, `gnome-open`, `kde-open`, `wslview` as appropriate +/// +/// For example, it can open a web browser with a URL, open a PDF file with the default PDF viewer, etc. +/// +/// You MUST ONLY use this tool when the user has explicitly requested opening something. You MUST NEVER assume that the user would like for you to use this tool. +#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] +pub struct OpenToolInput { + /// The path or URL to open with the default application. + path_or_url: String, +} + +pub struct OpenTool { + project: Entity, +} + +impl OpenTool { + pub fn new(project: Entity) -> Self { + Self { project } + } +} + +impl AgentTool for OpenTool { + type Input = OpenToolInput; + type Output = String; + + fn name(&self) -> SharedString { + "open".into() + } + + fn kind(&self) -> ToolKind { + ToolKind::Execute + } + + fn initial_title(&self, input: Result) -> SharedString { + if let Ok(input) = input { + format!("Open `{}`", MarkdownEscaped(&input.path_or_url)).into() + } else { + "Open file or URL".into() + } + } + + fn run( + self: Arc, + input: Self::Input, + event_stream: crate::ToolCallEventStream, + cx: &mut App, + ) -> Task> { + // If path_or_url turns out to be a path in the project, make it absolute. 
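+        // For example (illustrative paths): with a worktree rooted at `/home/user/project`, an
+        // input of `project/docs/readme.md` resolves to the absolute path
+        // `/home/user/project/docs/readme.md`, while `https://example.com` resolves to `None`
+        // and is passed to `open::that` unchanged.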
+ let abs_path = to_absolute_path(&input.path_or_url, self.project.clone(), cx); + let authorize = event_stream.authorize(self.initial_title(Ok(input.clone())), cx); + cx.background_spawn(async move { + authorize.await?; + + match abs_path { + Some(path) => open::that(path), + None => open::that(&input.path_or_url), + } + .context("Failed to open URL or file path")?; + + Ok(format!("Successfully opened {}", input.path_or_url)) + }) + } +} + +fn to_absolute_path( + potential_path: &str, + project: Entity, + cx: &mut App, +) -> Option { + let project = project.read(cx); + project + .find_project_path(PathBuf::from(potential_path), cx) + .and_then(|project_path| project.absolute_path(&project_path, cx)) +} + +#[cfg(test)] +mod tests { + use super::*; + use gpui::TestAppContext; + use project::{FakeFs, Project}; + use settings::SettingsStore; + use std::path::Path; + use tempfile::TempDir; + + #[gpui::test] + async fn test_to_absolute_path(cx: &mut TestAppContext) { + init_test(cx); + let temp_dir = TempDir::new().expect("Failed to create temp directory"); + let temp_path = temp_dir.path().to_string_lossy().to_string(); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + &temp_path, + serde_json::json!({ + "src": { + "main.rs": "fn main() {}", + "lib.rs": "pub fn lib_fn() {}" + }, + "docs": { + "readme.md": "# Project Documentation" + } + }), + ) + .await; + + // Use the temp_path as the root directory, not just its filename + let project = Project::test(fs.clone(), [temp_dir.path()], cx).await; + + // Test cases where the function should return Some + cx.update(|cx| { + // Project-relative paths should return Some + // Create paths using the last segment of the temp path to simulate a project-relative path + let root_dir_name = Path::new(&temp_path) + .file_name() + .unwrap_or_else(|| std::ffi::OsStr::new("temp")) + .to_string_lossy(); + + assert!( + to_absolute_path(&format!("{root_dir_name}/src/main.rs"), project.clone(), cx) + .is_some(), + "Failed to resolve main.rs path" + ); + + assert!( + to_absolute_path( + &format!("{root_dir_name}/docs/readme.md",), + project.clone(), + cx, + ) + .is_some(), + "Failed to resolve readme.md path" + ); + + // External URL should return None + let result = to_absolute_path("https://example.com", project.clone(), cx); + assert_eq!(result, None, "External URLs should return None"); + + // Path outside project + let result = to_absolute_path("../invalid/path", project.clone(), cx); + assert_eq!(result, None, "Paths outside the project should return None"); + }); + } + + fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + language::init(cx); + Project::init_settings(cx); + }); + } +} diff --git a/crates/agent2/src/tools/read_file_tool.rs b/crates/agent2/src/tools/read_file_tool.rs index 30794ccdad43ce11299baa5b9ad33555a0622e37..11a57506fb6454ad3c527e68f43089f5b80216e1 100644 --- a/crates/agent2/src/tools/read_file_tool.rs +++ b/crates/agent2/src/tools/read_file_tool.rs @@ -1,15 +1,16 @@ -use agent_client_protocol::{self as acp}; -use anyhow::{anyhow, Result}; -use assistant_tool::{outline, ActionLog}; -use gpui::{Entity, Task}; +use action_log::ActionLog; +use agent_client_protocol::{self as acp, ToolCallUpdateFields}; +use anyhow::{Context as _, Result, anyhow}; +use assistant_tool::outline; +use gpui::{App, Entity, SharedString, Task}; use indoc::formatdoc; -use language::{Anchor, Point}; -use project::{AgentLocation, Project, WorktreeSettings}; +use 
language::Point; +use language_model::{LanguageModelImage, LanguageModelToolResultContent}; +use project::{AgentLocation, ImageItem, Project, WorktreeSettings, image_store}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::Settings; use std::sync::Arc; -use ui::{App, SharedString}; use crate::{AgentTool, ToolCallEventStream}; @@ -20,8 +21,7 @@ use crate::{AgentTool, ToolCallEventStream}; pub struct ReadFileToolInput { /// The relative path of the file to read. /// - /// This path should never be absolute, and the first component - /// of the path should always be a root directory in a project. + /// This path should never be absolute, and the first component of the path should always be a root directory in a project. /// /// /// If the project has the following root directories: @@ -33,11 +33,9 @@ pub struct ReadFileToolInput { /// If you want to access `file.txt` in `directory2`, you should use the path `directory2/file.txt`. /// pub path: String, - /// Optional line number to start reading on (1-based index) #[serde(default)] pub start_line: Option, - /// Optional line number to end reading on (1-based index, inclusive) #[serde(default)] pub end_line: Option, @@ -59,6 +57,7 @@ impl ReadFileTool { impl AgentTool for ReadFileTool { type Input = ReadFileToolInput; + type Output = LanguageModelToolResultContent; fn name(&self) -> SharedString { "read_file".into() @@ -68,24 +67,28 @@ impl AgentTool for ReadFileTool { acp::ToolKind::Read } - fn initial_title(&self, input: Self::Input) -> SharedString { - let path = &input.path; - match (input.start_line, input.end_line) { - (Some(start), Some(end)) => { - format!( - "[Read file `{}` (lines {}-{})](@selection:{}:({}-{}))", - path, start, end, path, start, end - ) - } - (Some(start), None) => { - format!( - "[Read file `{}` (from line {})](@selection:{}:({}-{}))", - path, start, path, start, start - ) + fn initial_title(&self, input: Result) -> SharedString { + if let Ok(input) = input { + let path = &input.path; + match (input.start_line, input.end_line) { + (Some(start), Some(end)) => { + format!( + "[Read file `{}` (lines {}-{})](@selection:{}:({}-{}))", + path, start, end, path, start, end + ) + } + (Some(start), None) => { + format!( + "[Read file `{}` (from line {})](@selection:{}:({}-{}))", + path, start, path, start, start + ) + } + _ => format!("[Read file `{}`](@file:{})", path, path), } - _ => format!("[Read file `{}`](@file:{})", path, path), + .into() + } else { + "Read file".into() } - .into() } fn run( @@ -93,7 +96,7 @@ impl AgentTool for ReadFileTool { input: Self::Input, event_stream: ToolCallEventStream, cx: &mut App, - ) -> Task> { + ) -> Task> { let Some(project_path) = self.project.read(cx).find_project_path(&input.path, cx) else { return Task::ready(Err(anyhow!("Path {} not found in project", &input.path))); }; @@ -132,51 +135,27 @@ impl AgentTool for ReadFileTool { let file_path = input.path.clone(); - event_stream.send_update(acp::ToolCallUpdateFields { - locations: Some(vec![acp::ToolCallLocation { - path: project_path.path.to_path_buf(), - line: input.start_line, - // TODO (tracked): use full range - }]), - ..Default::default() - }); - - // TODO (tracked): images - // if image_store::is_image_file(&self.project, &project_path, cx) { - // let model = &self.thread.read(cx).selected_model; - - // if !model.supports_images() { - // return Task::ready(Err(anyhow!( - // "Attempted to read an image, but Zed doesn't currently support sending images to {}.", - // model.name().0 - // ))) - // .into(); 
- // } - - // return cx.spawn(async move |cx| -> Result { - // let image_entity: Entity = cx - // .update(|cx| { - // self.project.update(cx, |project, cx| { - // project.open_image(project_path.clone(), cx) - // }) - // })? - // .await?; - - // let image = - // image_entity.read_with(cx, |image_item, _| Arc::clone(&image_item.image))?; - - // let language_model_image = cx - // .update(|cx| LanguageModelImage::from_image(image, cx))? - // .await - // .context("processing image")?; - - // Ok(ToolResultOutput { - // content: ToolResultContent::Image(language_model_image), - // output: None, - // }) - // }); - // } - // + if image_store::is_image_file(&self.project, &project_path, cx) { + return cx.spawn(async move |cx| { + let image_entity: Entity = cx + .update(|cx| { + self.project.update(cx, |project, cx| { + project.open_image(project_path.clone(), cx) + }) + })? + .await?; + + let image = + image_entity.read_with(cx, |image_item, _| Arc::clone(&image_item.image))?; + + let language_model_image = cx + .update(|cx| LanguageModelImage::from_image(image, cx))? + .await + .context("processing image")?; + + Ok(language_model_image.into()) + }); + } let project = self.project.clone(); let action_log = self.action_log.clone(); @@ -184,31 +163,24 @@ impl AgentTool for ReadFileTool { cx.spawn(async move |cx| { let buffer = cx .update(|cx| { - project.update(cx, |project, cx| project.open_buffer(project_path, cx)) + project.update(cx, |project, cx| { + project.open_buffer(project_path.clone(), cx) + }) })? .await?; if buffer.read_with(cx, |buffer, _| { buffer .file() .as_ref() - .map_or(true, |file| !file.disk_state().exists()) + .is_none_or(|file| !file.disk_state().exists()) })? { anyhow::bail!("{file_path} not found"); } - project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: Anchor::MIN, - }), - cx, - ); - })?; + let mut anchor = None; // Check if specific line ranges are provided - if input.start_line.is_some() || input.end_line.is_some() { - let mut anchor = None; + let result = if input.start_line.is_some() || input.end_line.is_some() { let result = buffer.read_with(cx, |buffer, _cx| { let text = buffer.text(); // .max(1) because despite instructions to be 1-indexed, sometimes the model passes 0. @@ -232,19 +204,7 @@ impl AgentTool for ReadFileTool { log.buffer_read(buffer.clone(), cx); })?; - if let Some(anchor) = anchor { - project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: anchor, - }), - cx, - ); - })?; - } - - Ok(result) + Ok(result.into()) } else { // No line ranges specified, so check file size to see if it's too big. let file_size = buffer.read_with(cx, |buffer, _cx| buffer.text().len())?; @@ -254,15 +214,16 @@ impl AgentTool for ReadFileTool { let result = buffer.read_with(cx, |buffer, _cx| buffer.text())?; action_log.update(cx, |log, cx| { - log.buffer_read(buffer, cx); + log.buffer_read(buffer.clone(), cx); })?; - Ok(result) + Ok(result.into()) } else { // File is too big, so return the outline // and a suggestion to read again with line numbers. let outline = - outline::file_outline(project, file_path, action_log, None, cx).await?; + outline::file_outline(project.clone(), file_path, action_log, None, cx) + .await?; Ok(formatdoc! {" This file was too big to read all at once. 
@@ -276,20 +237,40 @@ impl AgentTool for ReadFileTool { Alternatively, you can fall back to the `grep` tool (if available) to search the file for specific content." - }) + } + .into()) } - } + }; + + project.update(cx, |project, cx| { + if let Some(abs_path) = project.absolute_path(&project_path, cx) { + project.set_agent_location( + Some(AgentLocation { + buffer: buffer.downgrade(), + position: anchor.unwrap_or(text::Anchor::MIN), + }), + cx, + ); + event_stream.update_fields(ToolCallUpdateFields { + locations: Some(vec![acp::ToolCallLocation { + path: abs_path, + line: input.start_line.map(|line| line.saturating_sub(1)), + }]), + ..Default::default() + }); + } + })?; + + result }) } } #[cfg(test)] mod test { - use crate::TestToolCallEventStream; - use super::*; use gpui::{AppContext, TestAppContext, UpdateGlobal as _}; - use language::{tree_sitter_rust, Language, LanguageConfig, LanguageMatcher}; + use language::{Language, LanguageConfig, LanguageMatcher, tree_sitter_rust}; use project::{FakeFs, Project}; use serde_json::json; use settings::SettingsStore; @@ -304,7 +285,7 @@ mod test { let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); let tool = Arc::new(ReadFileTool::new(project, action_log)); - let event_stream = TestToolCallEventStream::new(); + let (event_stream, _) = ToolCallEventStream::test(); let result = cx .update(|cx| { @@ -313,7 +294,7 @@ mod test { start_line: None, end_line: None, }; - tool.run(input, event_stream.stream(), cx) + tool.run(input, event_stream, cx) }) .await; assert_eq!( @@ -321,6 +302,7 @@ mod test { "root/nonexistent_file.txt not found" ); } + #[gpui::test] async fn test_read_small_file(cx: &mut TestAppContext) { init_test(cx); @@ -336,7 +318,6 @@ mod test { let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); let tool = Arc::new(ReadFileTool::new(project, action_log)); - let event_stream = TestToolCallEventStream::new(); let result = cx .update(|cx| { let input = ReadFileToolInput { @@ -344,10 +325,10 @@ mod test { start_line: None, end_line: None, }; - tool.run(input, event_stream.stream(), cx) + tool.run(input, ToolCallEventStream::test().0, cx) }) .await; - assert_eq!(result.unwrap(), "This is a small file content"); + assert_eq!(result.unwrap(), "This is a small file content".into()); } #[gpui::test] @@ -367,18 +348,18 @@ mod test { language_registry.add(Arc::new(rust_lang())); let action_log = cx.new(|_| ActionLog::new(project.clone())); let tool = Arc::new(ReadFileTool::new(project, action_log)); - let event_stream = TestToolCallEventStream::new(); - let content = cx + let result = cx .update(|cx| { let input = ReadFileToolInput { path: "root/large_file.rs".into(), start_line: None, end_line: None, }; - tool.clone().run(input, event_stream.stream(), cx) + tool.clone().run(input, ToolCallEventStream::test().0, cx) }) .await .unwrap(); + let content = result.to_str().unwrap(); assert_eq!( content.lines().skip(4).take(6).collect::>(), @@ -399,10 +380,11 @@ mod test { start_line: None, end_line: None, }; - tool.run(input, event_stream.stream(), cx) + tool.run(input, ToolCallEventStream::test().0, cx) }) - .await; - let content = result.unwrap(); + .await + .unwrap(); + let content = result.to_str().unwrap(); let expected_content = (0..1000) .flat_map(|i| { vec![ @@ -438,7 +420,6 @@ mod test { let action_log = cx.new(|_| ActionLog::new(project.clone())); let tool = 
Arc::new(ReadFileTool::new(project, action_log)); - let event_stream = TestToolCallEventStream::new(); let result = cx .update(|cx| { let input = ReadFileToolInput { @@ -446,10 +427,10 @@ mod test { start_line: Some(2), end_line: Some(4), }; - tool.run(input, event_stream.stream(), cx) + tool.run(input, ToolCallEventStream::test().0, cx) }) .await; - assert_eq!(result.unwrap(), "Line 2\nLine 3\nLine 4"); + assert_eq!(result.unwrap(), "Line 2\nLine 3\nLine 4".into()); } #[gpui::test] @@ -467,7 +448,6 @@ mod test { let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); let tool = Arc::new(ReadFileTool::new(project, action_log)); - let event_stream = TestToolCallEventStream::new(); // start_line of 0 should be treated as 1 let result = cx @@ -477,10 +457,10 @@ mod test { start_line: Some(0), end_line: Some(2), }; - tool.clone().run(input, event_stream.stream(), cx) + tool.clone().run(input, ToolCallEventStream::test().0, cx) }) .await; - assert_eq!(result.unwrap(), "Line 1\nLine 2"); + assert_eq!(result.unwrap(), "Line 1\nLine 2".into()); // end_line of 0 should result in at least 1 line let result = cx @@ -490,10 +470,10 @@ mod test { start_line: Some(1), end_line: Some(0), }; - tool.clone().run(input, event_stream.stream(), cx) + tool.clone().run(input, ToolCallEventStream::test().0, cx) }) .await; - assert_eq!(result.unwrap(), "Line 1"); + assert_eq!(result.unwrap(), "Line 1".into()); // when start_line > end_line, should still return at least 1 line let result = cx @@ -503,10 +483,10 @@ mod test { start_line: Some(3), end_line: Some(2), }; - tool.clone().run(input, event_stream.stream(), cx) + tool.clone().run(input, ToolCallEventStream::test().0, cx) }) .await; - assert_eq!(result.unwrap(), "Line 3"); + assert_eq!(result.unwrap(), "Line 3".into()); } fn init_test(cx: &mut TestAppContext) { @@ -612,7 +592,6 @@ mod test { let project = Project::test(fs.clone(), [path!("/project_root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); let tool = Arc::new(ReadFileTool::new(project, action_log)); - let event_stream = TestToolCallEventStream::new(); // Reading a file outside the project worktree should fail let result = cx @@ -622,7 +601,7 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, event_stream.stream(), cx) + tool.clone().run(input, ToolCallEventStream::test().0, cx) }) .await; assert!( @@ -638,7 +617,7 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, event_stream.stream(), cx) + tool.clone().run(input, ToolCallEventStream::test().0, cx) }) .await; assert!( @@ -654,7 +633,7 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, event_stream.stream(), cx) + tool.clone().run(input, ToolCallEventStream::test().0, cx) }) .await; assert!( @@ -669,7 +648,7 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, event_stream.stream(), cx) + tool.clone().run(input, ToolCallEventStream::test().0, cx) }) .await; assert!( @@ -685,7 +664,7 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, event_stream.stream(), cx) + tool.clone().run(input, ToolCallEventStream::test().0, cx) }) .await; assert!( @@ -700,7 +679,7 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, event_stream.stream(), cx) + tool.clone().run(input, ToolCallEventStream::test().0, cx) }) .await; assert!( @@ -715,7 +694,7 @@ mod test { start_line: None, end_line: None, 
}; - tool.clone().run(input, event_stream.stream(), cx) + tool.clone().run(input, ToolCallEventStream::test().0, cx) }) .await; assert!( @@ -731,11 +710,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, event_stream.stream(), cx) + tool.clone().run(input, ToolCallEventStream::test().0, cx) }) .await; assert!(result.is_ok(), "Should be able to read normal files"); - assert_eq!(result.unwrap(), "Normal file content"); + assert_eq!(result.unwrap(), "Normal file content".into()); // Path traversal attempts with .. should fail let result = cx @@ -745,7 +724,7 @@ mod test { start_line: None, end_line: None, }; - tool.run(input, event_stream.stream(), cx) + tool.run(input, ToolCallEventStream::test().0, cx) }) .await; assert!( @@ -826,7 +805,6 @@ mod test { let action_log = cx.new(|_| ActionLog::new(project.clone())); let tool = Arc::new(ReadFileTool::new(project.clone(), action_log.clone())); - let event_stream = TestToolCallEventStream::new(); // Test reading allowed files in worktree1 let result = cx @@ -836,12 +814,15 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, event_stream.stream(), cx) + tool.clone().run(input, ToolCallEventStream::test().0, cx) }) .await .unwrap(); - assert_eq!(result, "fn main() { println!(\"Hello from worktree1\"); }"); + assert_eq!( + result, + "fn main() { println!(\"Hello from worktree1\"); }".into() + ); // Test reading private file in worktree1 should fail let result = cx @@ -851,7 +832,7 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, event_stream.stream(), cx) + tool.clone().run(input, ToolCallEventStream::test().0, cx) }) .await; @@ -872,7 +853,7 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, event_stream.stream(), cx) + tool.clone().run(input, ToolCallEventStream::test().0, cx) }) .await; @@ -893,14 +874,14 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, event_stream.stream(), cx) + tool.clone().run(input, ToolCallEventStream::test().0, cx) }) .await .unwrap(); assert_eq!( result, - "export function greet() { return 'Hello from worktree2'; }" + "export function greet() { return 'Hello from worktree2'; }".into() ); // Test reading private file in worktree2 should fail @@ -911,7 +892,7 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, event_stream.stream(), cx) + tool.clone().run(input, ToolCallEventStream::test().0, cx) }) .await; @@ -932,7 +913,7 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, event_stream.stream(), cx) + tool.clone().run(input, ToolCallEventStream::test().0, cx) }) .await; @@ -954,7 +935,7 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, event_stream.stream(), cx) + tool.clone().run(input, ToolCallEventStream::test().0, cx) }) .await; diff --git a/crates/agent2/src/tools/terminal_tool.rs b/crates/agent2/src/tools/terminal_tool.rs new file mode 100644 index 0000000000000000000000000000000000000000..3d4faf2e031aca522af8c7787b05bf302e45969e --- /dev/null +++ b/crates/agent2/src/tools/terminal_tool.rs @@ -0,0 +1,468 @@ +use agent_client_protocol as acp; +use anyhow::Result; +use futures::{FutureExt as _, future::Shared}; +use gpui::{App, AppContext, Entity, SharedString, Task}; +use project::{Project, terminals::TerminalKind}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use std::{ + path::{Path, PathBuf}, + sync::Arc, +}; +use util::{ResultExt, get_system_shell, markdown::MarkdownInlineCode}; + 
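+// Shell selection: on Unix this tool prefers `bash` when it is available on PATH and falls back
+// to `get_system_shell()` otherwise; on Windows the system shell is always used (see
+// `TerminalTool::new` below).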
+use crate::{AgentTool, ToolCallEventStream};
+
+const COMMAND_OUTPUT_LIMIT: usize = 16 * 1024;
+
+/// Executes a shell one-liner and returns the combined output.
+///
+/// This tool spawns a process using the user's shell, reads from stdout and stderr (preserving the order of writes), and returns a string with the combined output result.
+///
+/// The output results will be shown to the user already, only list it again if necessary, avoid being redundant.
+///
+/// Make sure you use the `cd` parameter to navigate to one of the root directories of the project. NEVER do it as part of the `command` itself, otherwise it will error.
+///
+/// Do not use this tool for commands that run indefinitely, such as servers (like `npm run start`, `npm run dev`, `python -m http.server`, etc) or file watchers that don't terminate on their own.
+///
+/// Remember that each invocation of this tool will spawn a new shell process, so you can't rely on any state from previous invocations.
+#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
+pub struct TerminalToolInput {
+    /// The one-liner command to execute.
+    command: String,
+    /// Working directory for the command. This must be one of the root directories of the project.
+    cd: String,
+}
+
+pub struct TerminalTool {
+    project: Entity<Project>,
+    determine_shell: Shared<Task<String>>,
+}
+
+impl TerminalTool {
+    pub fn new(project: Entity<Project>, cx: &mut App) -> Self {
+        let determine_shell = cx.background_spawn(async move {
+            if cfg!(windows) {
+                return get_system_shell();
+            }
+
+            if which::which("bash").is_ok() {
+                "bash".into()
+            } else {
+                get_system_shell()
+            }
+        });
+        Self {
+            project,
+            determine_shell: determine_shell.shared(),
+        }
+    }
+}
+
+impl AgentTool for TerminalTool {
+    type Input = TerminalToolInput;
+    type Output = String;
+
+    fn name(&self) -> SharedString {
+        "terminal".into()
+    }
+
+    fn kind(&self) -> acp::ToolKind {
+        acp::ToolKind::Execute
+    }
+
+    fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
+        if let Ok(input) = input {
+            let mut lines = input.command.lines();
+            let first_line = lines.next().unwrap_or_default();
+            let remaining_line_count = lines.count();
+            match remaining_line_count {
+                0 => MarkdownInlineCode(first_line).to_string().into(),
+                1 => MarkdownInlineCode(&format!(
+                    "{} - {} more line",
+                    first_line, remaining_line_count
+                ))
+                .to_string()
+                .into(),
+                n => MarkdownInlineCode(&format!("{} - {} more lines", first_line, n))
+                    .to_string()
+                    .into(),
+            }
+        } else {
+            "Run terminal command".into()
+        }
+    }
+
+    fn run(
+        self: Arc<Self>,
+        input: Self::Input,
+        event_stream: ToolCallEventStream,
+        cx: &mut App,
+    ) -> Task<Result<Self::Output>> {
+        let language_registry = self.project.read(cx).languages().clone();
+        let working_dir = match working_dir(&input, &self.project, cx) {
+            Ok(dir) => dir,
+            Err(err) => return Task::ready(Err(err)),
+        };
+        let program = self.determine_shell.clone();
+        let command = if cfg!(windows) {
+            format!("$null | & {{{}}}", input.command.replace("\"", "'"))
+        } else if let Some(cwd) = working_dir
+            .as_ref()
+            .and_then(|cwd| cwd.as_os_str().to_str())
+        {
+            // Make sure once we're *inside* the shell, we cd into `cwd`
+            format!("(cd {cwd}; {}) </dev/null", input.command)
+        } else {
+            format!("({}) </dev/null", input.command)
+        };
+        let args = vec!["-c".into(), command];
+
+        let env = match &working_dir {
+            Some(dir) => self.project.update(cx, |project, cx| {
+                project.directory_environment(dir.as_path().into(), cx)
+            }),
+            None => Task::ready(None).shared(),
+        };
+
+        let env = cx.spawn(async move |_| {
+            let mut env = env.await.unwrap_or_default();
+            if cfg!(unix) {
+                env.insert("PAGER".into(), "cat".into());
+            }
+            env
+        });
+
+        let authorize = event_stream.authorize(self.initial_title(Ok(input.clone())),
cx); + + cx.spawn({ + async move |cx| { + authorize.await?; + + let program = program.await; + let env = env.await; + let terminal = self + .project + .update(cx, |project, cx| { + project.create_terminal( + TerminalKind::Task(task::SpawnInTerminal { + command: Some(program), + args, + cwd: working_dir.clone(), + env, + ..Default::default() + }), + cx, + ) + })? + .await?; + let acp_terminal = cx.new(|cx| { + acp_thread::Terminal::new( + input.command.clone(), + working_dir.clone(), + terminal.clone(), + language_registry, + cx, + ) + })?; + event_stream.update_terminal(acp_terminal.clone()); + + let exit_status = terminal + .update(cx, |terminal, cx| terminal.wait_for_completed_task(cx))? + .await; + let (content, content_line_count) = terminal.read_with(cx, |terminal, _| { + (terminal.get_content(), terminal.total_lines()) + })?; + + let (processed_content, finished_with_empty_output) = process_content( + &content, + &input.command, + exit_status.map(portable_pty::ExitStatus::from), + ); + + acp_terminal + .update(cx, |terminal, cx| { + terminal.finish( + exit_status, + content.len(), + processed_content.len(), + content_line_count, + finished_with_empty_output, + cx, + ); + }) + .log_err(); + + Ok(processed_content) + } + }) + } +} + +fn process_content( + content: &str, + command: &str, + exit_status: Option, +) -> (String, bool) { + let should_truncate = content.len() > COMMAND_OUTPUT_LIMIT; + + let content = if should_truncate { + let mut end_ix = COMMAND_OUTPUT_LIMIT.min(content.len()); + while !content.is_char_boundary(end_ix) { + end_ix -= 1; + } + // Don't truncate mid-line, clear the remainder of the last line + end_ix = content[..end_ix].rfind('\n').unwrap_or(end_ix); + &content[..end_ix] + } else { + content + }; + let content = content.trim(); + let is_empty = content.is_empty(); + let content = format!("```\n{content}\n```"); + let content = if should_truncate { + format!( + "Command output too long. The first {} bytes:\n\n{content}", + content.len(), + ) + } else { + content + }; + + let content = match exit_status { + Some(exit_status) if exit_status.success() => { + if is_empty { + "Command executed successfully.".to_string() + } else { + content + } + } + Some(exit_status) => { + if is_empty { + format!( + "Command \"{command}\" failed with exit code {}.", + exit_status.exit_code() + ) + } else { + format!( + "Command \"{command}\" failed with exit code {}.\n\n{content}", + exit_status.exit_code() + ) + } + } + None => { + format!( + "Command failed or was interrupted.\nPartial output captured:\n\n{}", + content, + ) + } + }; + (content, is_empty) +} + +fn working_dir( + input: &TerminalToolInput, + project: &Entity, + cx: &mut App, +) -> Result> { + let project = project.read(cx); + let cd = &input.cd; + + if cd == "." || cd.is_empty() { + // Accept "." or "" as meaning "the one worktree" if we only have one worktree. + let mut worktrees = project.worktrees(cx); + + match worktrees.next() { + Some(worktree) => { + anyhow::ensure!( + worktrees.next().is_none(), + "'.' is ambiguous in multi-root workspaces. Please specify a root directory explicitly.", + ); + Ok(Some(worktree.read(cx).abs_path().to_path_buf())) + } + None => Ok(None), + } + } else { + let input_path = Path::new(cd); + + if input_path.is_absolute() { + // Absolute paths are allowed, but only if they're in one of the project's worktrees. 
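+            // For example (illustrative paths): if the project's only worktree root is
+            // `/home/user/project`, then `cd = "/home/user/project/src"` is accepted here,
+            // while `cd = "/home/user"` is not and falls through to the error below.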
+ if project + .worktrees(cx) + .any(|worktree| input_path.starts_with(&worktree.read(cx).abs_path())) + { + return Ok(Some(input_path.into())); + } + } else if let Some(worktree) = project.worktree_for_root_name(cd, cx) { + return Ok(Some(worktree.read(cx).abs_path().to_path_buf())); + } + + anyhow::bail!("`cd` directory {cd:?} was not in any of the project's worktrees."); + } +} + +#[cfg(test)] +mod tests { + use agent_settings::AgentSettings; + use editor::EditorSettings; + use fs::RealFs; + use gpui::{BackgroundExecutor, TestAppContext}; + use pretty_assertions::assert_eq; + use serde_json::json; + use settings::{Settings, SettingsStore}; + use terminal::terminal_settings::TerminalSettings; + use theme::ThemeSettings; + use util::test::TempTree; + + use crate::ThreadEvent; + + use super::*; + + fn init_test(executor: &BackgroundExecutor, cx: &mut TestAppContext) { + zlog::init_test(); + + executor.allow_parking(); + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + language::init(cx); + Project::init_settings(cx); + ThemeSettings::register(cx); + TerminalSettings::register(cx); + EditorSettings::register(cx); + AgentSettings::register(cx); + }); + } + + #[gpui::test] + async fn test_interactive_command(executor: BackgroundExecutor, cx: &mut TestAppContext) { + if cfg!(windows) { + return; + } + + init_test(&executor, cx); + + let fs = Arc::new(RealFs::new(None, executor)); + let tree = TempTree::new(json!({ + "project": {}, + })); + let project: Entity = + Project::test(fs, [tree.path().join("project").as_path()], cx).await; + + let input = TerminalToolInput { + command: "cat".to_owned(), + cd: tree + .path() + .join("project") + .as_path() + .to_string_lossy() + .to_string(), + }; + let (event_stream_tx, mut event_stream_rx) = ToolCallEventStream::test(); + let result = cx + .update(|cx| Arc::new(TerminalTool::new(project, cx)).run(input, event_stream_tx, cx)); + + let auth = event_stream_rx.expect_authorization().await; + auth.response.send(auth.options[0].id.clone()).unwrap(); + event_stream_rx.expect_terminal().await; + assert_eq!(result.await.unwrap(), "Command executed successfully."); + } + + #[gpui::test] + async fn test_working_directory(executor: BackgroundExecutor, cx: &mut TestAppContext) { + if cfg!(windows) { + return; + } + + init_test(&executor, cx); + + let fs = Arc::new(RealFs::new(None, executor)); + let tree = TempTree::new(json!({ + "project": {}, + "other-project": {}, + })); + let project: Entity = + Project::test(fs, [tree.path().join("project").as_path()], cx).await; + + let check = |input, expected, cx: &mut TestAppContext| { + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let result = cx.update(|cx| { + Arc::new(TerminalTool::new(project.clone(), cx)).run(input, stream_tx, cx) + }); + cx.run_until_parked(); + let event = stream_rx.try_next(); + if let Ok(Some(Ok(ThreadEvent::ToolCallAuthorization(auth)))) = event { + auth.response.send(auth.options[0].id.clone()).unwrap(); + } + + cx.spawn(async move |_| { + let output = result.await; + assert_eq!(output.ok(), expected); + }) + }; + + check( + TerminalToolInput { + command: "pwd".into(), + cd: ".".into(), + }, + Some(format!( + "```\n{}\n```", + tree.path().join("project").display() + )), + cx, + ) + .await; + + check( + TerminalToolInput { + command: "pwd".into(), + cd: "other-project".into(), + }, + None, // other-project is a dir, but *not* a worktree (yet) + cx, + ) + .await; + + // Absolute path above the worktree root + check( + 
TerminalToolInput { + command: "pwd".into(), + cd: tree.path().to_string_lossy().into(), + }, + None, + cx, + ) + .await; + + project + .update(cx, |project, cx| { + project.create_worktree(tree.path().join("other-project"), true, cx) + }) + .await + .unwrap(); + + check( + TerminalToolInput { + command: "pwd".into(), + cd: "other-project".into(), + }, + Some(format!( + "```\n{}\n```", + tree.path().join("other-project").display() + )), + cx, + ) + .await; + + check( + TerminalToolInput { + command: "pwd".into(), + cd: ".".into(), + }, + None, + cx, + ) + .await; + } +} diff --git a/crates/agent2/src/tools/thinking_tool.rs b/crates/agent2/src/tools/thinking_tool.rs index bb85d8ecebd7870436a7b97c96af241c3d656665..c5e94511621768868cd56f165b4f50c903874e0d 100644 --- a/crates/agent2/src/tools/thinking_tool.rs +++ b/crates/agent2/src/tools/thinking_tool.rs @@ -11,8 +11,7 @@ use crate::{AgentTool, ToolCallEventStream}; /// Use this tool when you need to work through complex problems, develop strategies, or outline approaches before taking action. #[derive(Debug, Serialize, Deserialize, JsonSchema)] pub struct ThinkingToolInput { - /// Content to think about. This should be a description of what to think about or - /// a problem to solve. + /// Content to think about. This should be a description of what to think about or a problem to solve. content: String, } @@ -20,6 +19,7 @@ pub struct ThinkingTool; impl AgentTool for ThinkingTool { type Input = ThinkingToolInput; + type Output = String; fn name(&self) -> SharedString { "thinking".into() @@ -29,7 +29,7 @@ impl AgentTool for ThinkingTool { acp::ToolKind::Think } - fn initial_title(&self, _input: Self::Input) -> SharedString { + fn initial_title(&self, _input: Result) -> SharedString { "Thinking".into() } @@ -39,7 +39,7 @@ impl AgentTool for ThinkingTool { event_stream: ToolCallEventStream, _cx: &mut App, ) -> Task> { - event_stream.send_update(acp::ToolCallUpdateFields { + event_stream.update_fields(acp::ToolCallUpdateFields { content: Some(vec![input.content.into()]), ..Default::default() }); diff --git a/crates/agent2/src/tools/web_search_tool.rs b/crates/agent2/src/tools/web_search_tool.rs new file mode 100644 index 0000000000000000000000000000000000000000..ffcd4ad3becf0417aa6175808614c71abdd95e8e --- /dev/null +++ b/crates/agent2/src/tools/web_search_tool.rs @@ -0,0 +1,127 @@ +use std::sync::Arc; + +use crate::{AgentTool, ToolCallEventStream}; +use agent_client_protocol as acp; +use anyhow::{Result, anyhow}; +use cloud_llm_client::WebSearchResponse; +use gpui::{App, AppContext, Task}; +use language_model::{ + LanguageModelProviderId, LanguageModelToolResultContent, ZED_CLOUD_PROVIDER_ID, +}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use ui::prelude::*; +use web_search::WebSearchRegistry; + +/// Search the web for information using your query. +/// Use this when you need real-time information, facts, or data that might not be in your training. +/// Results will include snippets and links from relevant web pages. +#[derive(Debug, Serialize, Deserialize, JsonSchema)] +pub struct WebSearchToolInput { + /// The search term or question to query on the web. 
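+    /// For example: "current Rust stable version" or "weather in Tokyo today".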
+ query: String, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(transparent)] +pub struct WebSearchToolOutput(WebSearchResponse); + +impl From for LanguageModelToolResultContent { + fn from(value: WebSearchToolOutput) -> Self { + serde_json::to_string(&value.0) + .expect("Failed to serialize WebSearchResponse") + .into() + } +} + +pub struct WebSearchTool; + +impl AgentTool for WebSearchTool { + type Input = WebSearchToolInput; + type Output = WebSearchToolOutput; + + fn name(&self) -> SharedString { + "web_search".into() + } + + fn kind(&self) -> acp::ToolKind { + acp::ToolKind::Fetch + } + + fn initial_title(&self, _input: Result) -> SharedString { + "Searching the Web".into() + } + + /// We currently only support Zed Cloud as a provider. + fn supported_provider(&self, provider: &LanguageModelProviderId) -> bool { + provider == &ZED_CLOUD_PROVIDER_ID + } + + fn run( + self: Arc, + input: Self::Input, + event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + let Some(provider) = WebSearchRegistry::read_global(cx).active_provider() else { + return Task::ready(Err(anyhow!("Web search is not available."))); + }; + + let search_task = provider.search(input.query, cx); + cx.background_spawn(async move { + let response = match search_task.await { + Ok(response) => response, + Err(err) => { + event_stream.update_fields(acp::ToolCallUpdateFields { + title: Some("Web Search Failed".to_string()), + ..Default::default() + }); + return Err(err); + } + }; + + emit_update(&response, &event_stream); + Ok(WebSearchToolOutput(response)) + }) + } + + fn replay( + &self, + _input: Self::Input, + output: Self::Output, + event_stream: ToolCallEventStream, + _cx: &mut App, + ) -> Result<()> { + emit_update(&output.0, &event_stream); + Ok(()) + } +} + +fn emit_update(response: &WebSearchResponse, event_stream: &ToolCallEventStream) { + let result_text = if response.results.len() == 1 { + "1 result".to_string() + } else { + format!("{} results", response.results.len()) + }; + event_stream.update_fields(acp::ToolCallUpdateFields { + title: Some(format!("Searched the web: {result_text}")), + content: Some( + response + .results + .iter() + .map(|result| acp::ToolCallContent::Content { + content: acp::ContentBlock::ResourceLink(acp::ResourceLink { + name: result.title.clone(), + uri: result.url.clone(), + title: Some(result.title.clone()), + description: Some(result.text.clone()), + mime_type: None, + annotations: None, + size: None, + }), + }) + .collect(), + ), + ..Default::default() + }); +} diff --git a/crates/agent_servers/Cargo.toml b/crates/agent_servers/Cargo.toml index 81c97c8aa6cc4fa64d017b97ade5ddd535487b81..60dd7964639ef59794b6e8bbe11192d9a33cbe01 100644 --- a/crates/agent_servers/Cargo.toml +++ b/crates/agent_servers/Cargo.toml @@ -6,7 +6,7 @@ publish.workspace = true license = "GPL-3.0-or-later" [features] -test-support = ["acp_thread/test-support", "gpui/test-support", "project/test-support"] +test-support = ["acp_thread/test-support", "gpui/test-support", "project/test-support", "dep:env_logger", "fs", "client/test-support", "dep:gpui_tokio", "reqwest_client/test-support"] e2e = [] [lints] @@ -18,20 +18,31 @@ doctest = false [dependencies] acp_thread.workspace = true +action_log.workspace = true agent-client-protocol.workspace = true +agent_settings.workspace = true agentic-coding-protocol.workspace = true anyhow.workspace = true +client = { workspace = true, optional = true } collections.workspace = true context_server.workspace = true +env_logger = { workspace = true, optional 
= true } +fs = { workspace = true, optional = true } futures.workspace = true gpui.workspace = true +gpui_tokio = { workspace = true, optional = true } indoc.workspace = true itertools.workspace = true +language.workspace = true +language_model.workspace = true +language_models.workspace = true log.workspace = true paths.workspace = true project.workspace = true rand.workspace = true +reqwest_client = { workspace = true, optional = true } schemars.workspace = true +semver.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true @@ -51,8 +62,12 @@ libc.workspace = true nix.workspace = true [dev-dependencies] +client = { workspace = true, features = ["test-support"] } env_logger.workspace = true +fs.workspace = true language.workspace = true indoc.workspace = true acp_thread = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } +gpui_tokio.workspace = true +reqwest_client = { workspace = true, features = ["test-support"] } diff --git a/crates/agent_servers/src/acp.rs b/crates/agent_servers/src/acp.rs index 00e3e3df5093c6f1acef32665ab0d3d8846fc39f..1cfb1fcabf3a0a4cc009ab0912188437aef17c2c 100644 --- a/crates/agent_servers/src/acp.rs +++ b/crates/agent_servers/src/acp.rs @@ -19,14 +19,14 @@ pub async fn connect( root_dir: &Path, cx: &mut AsyncApp, ) -> Result> { - let conn = v1::AcpConnection::stdio(server_name, command.clone(), &root_dir, cx).await; + let conn = v1::AcpConnection::stdio(server_name, command.clone(), root_dir, cx).await; match conn { Ok(conn) => Ok(Rc::new(conn) as _), Err(err) if err.is::() => { // Consider re-using initialize response and subprocess when adding another version here let conn: Rc = - Rc::new(v0::AcpConnection::stdio(server_name, command, &root_dir, cx).await?); + Rc::new(v0::AcpConnection::stdio(server_name, command, root_dir, cx).await?); Ok(conn) } Err(err) => Err(err), diff --git a/crates/agent_servers/src/acp/v0.rs b/crates/agent_servers/src/acp/v0.rs index 8d85435f92779e478334fb5b920a3af610287ffe..be960489293f6db935d5d4cba0160ed807079c64 100644 --- a/crates/agent_servers/src/acp/v0.rs +++ b/crates/agent_servers/src/acp/v0.rs @@ -1,11 +1,12 @@ // Translates old acp agents into the new schema +use action_log::ActionLog; use agent_client_protocol as acp; use agentic_coding_protocol::{self as acp_old, AgentRequest as _}; use anyhow::{Context as _, Result, anyhow}; use futures::channel::oneshot; use gpui::{AppContext as _, AsyncApp, Entity, Task, WeakEntity}; use project::Project; -use std::{cell::RefCell, path::Path, rc::Rc}; +use std::{any::Any, cell::RefCell, path::Path, rc::Rc}; use ui::App; use util::ResultExt as _; @@ -135,9 +136,9 @@ impl acp_old::Client for OldAcpClientDelegate { let response = cx .update(|cx| { self.thread.borrow().update(cx, |thread, cx| { - thread.request_tool_call_authorization(tool_call, acp_options, cx) + thread.request_tool_call_authorization(tool_call.into(), acp_options, cx) }) - })? + })?? .context("Failed to update thread")? .await; @@ -148,7 +149,7 @@ impl acp_old::Client for OldAcpClientDelegate { Ok(acp_old::RequestToolCallConfirmationResponse { id: acp_old::ToolCallId(old_acp_id), - outcome: outcome, + outcome, }) } @@ -168,7 +169,7 @@ impl acp_old::Client for OldAcpClientDelegate { cx, ) }) - })? + })?? 
.context("Failed to update thread")?; Ok(acp_old::PushToolCallResponse { @@ -265,7 +266,7 @@ impl acp_old::Client for OldAcpClientDelegate { fn into_new_tool_call(id: acp::ToolCallId, request: acp_old::PushToolCallParams) -> acp::ToolCall { acp::ToolCall { - id: id, + id, title: request.label, kind: acp_kind_from_old_icon(request.icon), status: acp::ToolCallStatus::InProgress, @@ -423,7 +424,7 @@ impl AgentConnection for AcpConnection { self: Rc, project: Entity, _cwd: &Path, - cx: &mut AsyncApp, + cx: &mut App, ) -> Task>> { let task = self.connection.request_any( acp_old::InitializeParams { @@ -437,13 +438,14 @@ impl AgentConnection for AcpConnection { let result = acp_old::InitializeParams::response_from_any(result)?; if !result.is_authenticated { - anyhow::bail!(AuthRequired) + anyhow::bail!(AuthRequired::new()) } cx.update(|cx| { let thread = cx.new(|cx| { let session_id = acp::SessionId("acp-old-no-id".into()); - AcpThread::new(self.name, self.clone(), project, session_id, cx) + let action_log = cx.new(|_| ActionLog::new(project.clone())); + AcpThread::new(self.name, self.clone(), project, action_log, session_id) }); current_thread.replace(thread.downgrade()); thread @@ -467,6 +469,7 @@ impl AgentConnection for AcpConnection { fn prompt( &self, + _id: Option, params: acp::PromptRequest, cx: &mut App, ) -> Task> { @@ -495,6 +498,14 @@ impl AgentConnection for AcpConnection { }) } + fn prompt_capabilities(&self) -> acp::PromptCapabilities { + acp::PromptCapabilities { + image: false, + audio: false, + embedded_context: false, + } + } + fn cancel(&self, _session_id: &acp::SessionId, cx: &mut App) { let task = self .connection @@ -506,4 +517,8 @@ impl AgentConnection for AcpConnection { }) .detach_and_log_err(cx) } + + fn into_any(self: Rc) -> Rc { + self + } } diff --git a/crates/agent_servers/src/acp/v1.rs b/crates/agent_servers/src/acp/v1.rs index ff71783b4890be905ce5d3a68202c6ea0371c12f..29f389547d0ae5daecca85a95cb8b9b63530fc34 100644 --- a/crates/agent_servers/src/acp/v1.rs +++ b/crates/agent_servers/src/acp/v1.rs @@ -1,28 +1,34 @@ -use agent_client_protocol::{self as acp, Agent as _}; +use action_log::ActionLog; +use agent_client_protocol::{self as acp, Agent as _, ErrorCode}; use anyhow::anyhow; use collections::HashMap; +use futures::AsyncBufReadExt as _; use futures::channel::oneshot; +use futures::io::BufReader; use project::Project; -use std::cell::RefCell; +use serde::Deserialize; use std::path::Path; use std::rc::Rc; +use std::{any::Any, cell::RefCell}; use anyhow::{Context as _, Result}; use gpui::{App, AppContext as _, AsyncApp, Entity, Task, WeakEntity}; use crate::{AgentServerCommand, acp::UnsupportedVersion}; -use acp_thread::{AcpThread, AgentConnection, AuthRequired}; +use acp_thread::{AcpThread, AgentConnection, AuthRequired, LoadError}; pub struct AcpConnection { server_name: &'static str, connection: Rc, sessions: Rc>>, auth_methods: Vec, + prompt_capabilities: acp::PromptCapabilities, _io_task: Task>, } pub struct AcpSession { thread: WeakEntity, + suppress_abort_err: bool, } const MINIMUM_SUPPORTED_VERSION: acp::ProtocolVersion = acp::V1; @@ -40,12 +46,13 @@ impl AcpConnection { .current_dir(root_dir) .stdin(std::process::Stdio::piped()) .stdout(std::process::Stdio::piped()) - .stderr(std::process::Stdio::inherit()) + .stderr(std::process::Stdio::piped()) .kill_on_drop(true) .spawn()?; - let stdout = child.stdout.take().expect("Failed to take stdout"); - let stdin = child.stdin.take().expect("Failed to take stdin"); + let stdout = 
child.stdout.take().context("Failed to take stdout")?; + let stdin = child.stdin.take().context("Failed to take stdin")?; + let stderr = child.stderr.take().context("Failed to take stderr")?; log::trace!("Spawned (pid: {})", child.id()); let sessions = Rc::new(RefCell::new(HashMap::default())); @@ -63,6 +70,18 @@ impl AcpConnection { let io_task = cx.background_spawn(io_task); + cx.background_spawn(async move { + let mut stderr = BufReader::new(stderr); + let mut line = String::new(); + while let Ok(n) = stderr.read_line(&mut line).await + && n > 0 + { + log::warn!("agent stderr: {}", &line); + line.clear(); + } + }) + .detach(); + cx.spawn({ let sessions = sessions.clone(); async move |cx| { @@ -71,7 +90,9 @@ impl AcpConnection { for session in sessions.borrow().values() { session .thread - .update(cx, |thread, cx| thread.emit_server_exited(status, cx)) + .update(cx, |thread, cx| { + thread.emit_load_error(LoadError::Exited { status }, cx) + }) .ok(); } @@ -101,6 +122,7 @@ impl AcpConnection { connection: connection.into(), server_name, sessions, + prompt_capabilities: response.agent_capabilities.prompt_capabilities, _io_task: io_task, }) } @@ -111,7 +133,7 @@ impl AgentConnection for AcpConnection { self: Rc, project: Entity, cwd: &Path, - cx: &mut AsyncApp, + cx: &mut App, ) -> Task>> { let conn = self.connection.clone(); let sessions = self.sessions.clone(); @@ -125,26 +147,33 @@ impl AgentConnection for AcpConnection { .await .map_err(|err| { if err.code == acp::ErrorCode::AUTH_REQUIRED.code { - anyhow!(AuthRequired) + let mut error = AuthRequired::new(); + + if err.message != acp::ErrorCode::AUTH_REQUIRED.message { + error = error.with_description(err.message); + } + + anyhow!(error) } else { anyhow!(err) } })?; let session_id = response.session_id; - - let thread = cx.new(|cx| { + let action_log = cx.new(|_| ActionLog::new(project.clone()))?; + let thread = cx.new(|_cx| { AcpThread::new( self.server_name, self.clone(), project, + action_log, session_id.clone(), - cx, ) })?; let session = AcpSession { thread: thread.downgrade(), + suppress_abort_err: false, }; sessions.borrow_mut().insert(session_id, session); @@ -171,17 +200,69 @@ impl AgentConnection for AcpConnection { fn prompt( &self, + _id: Option, params: acp::PromptRequest, cx: &mut App, ) -> Task> { let conn = self.connection.clone(); + let sessions = self.sessions.clone(); + let session_id = params.session_id.clone(); cx.foreground_executor().spawn(async move { - let response = conn.prompt(params).await?; - Ok(response) + let result = conn.prompt(params).await; + + let mut suppress_abort_err = false; + + if let Some(session) = sessions.borrow_mut().get_mut(&session_id) { + suppress_abort_err = session.suppress_abort_err; + session.suppress_abort_err = false; + } + + match result { + Ok(response) => Ok(response), + Err(err) => { + if err.code != ErrorCode::INTERNAL_ERROR.code { + anyhow::bail!(err) + } + + let Some(data) = &err.data else { + anyhow::bail!(err) + }; + + // Temporary workaround until the following PR is generally available: + // https://github.com/google-gemini/gemini-cli/pull/6656 + + #[derive(Deserialize)] + #[serde(deny_unknown_fields)] + struct ErrorDetails { + details: Box, + } + + match serde_json::from_value(data.clone()) { + Ok(ErrorDetails { details }) => { + if suppress_abort_err && details.contains("This operation was aborted") + { + Ok(acp::PromptResponse { + stop_reason: acp::StopReason::Cancelled, + }) + } else { + Err(anyhow!(details)) + } + } + Err(_) => Err(anyhow!(err)), + } + } + } }) } + fn 
prompt_capabilities(&self) -> acp::PromptCapabilities { + self.prompt_capabilities + } + fn cancel(&self, session_id: &acp::SessionId, cx: &mut App) { + if let Some(session) = self.sessions.borrow_mut().get_mut(session_id) { + session.suppress_abort_err = true; + } let conn = self.connection.clone(); let params = acp::CancelNotification { session_id: session_id.clone(), @@ -190,6 +271,10 @@ impl AgentConnection for AcpConnection { .spawn(async move { conn.cancel(params).await }) .detach(); } + + fn into_any(self: Rc) -> Rc { + self + } } struct ClientDelegate { @@ -213,7 +298,7 @@ impl acp::Client for ClientDelegate { thread.request_tool_call_authorization(arguments.tool_call, arguments.options, cx) })?; - let result = rx.await; + let result = rx?.await; let outcome = match result { Ok(option) => acp::RequestPermissionOutcome::Selected { option_id: option }, diff --git a/crates/agent_servers/src/agent_servers.rs b/crates/agent_servers/src/agent_servers.rs index b3b8a3317049927986a6a578bc50c4e5506b7650..2f5ec478ae8288f4e4db3db84c622d2c03c615be 100644 --- a/crates/agent_servers/src/agent_servers.rs +++ b/crates/agent_servers/src/agent_servers.rs @@ -3,8 +3,8 @@ mod claude; mod gemini; mod settings; -#[cfg(test)] -mod e2e_tests; +#[cfg(any(test, feature = "test-support"))] +pub mod e2e_tests; pub use claude::*; pub use gemini::*; @@ -18,6 +18,7 @@ use project::Project; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use std::{ + any::Any, path::{Path, PathBuf}, rc::Rc, sync::Arc, @@ -40,6 +41,14 @@ pub trait AgentServer: Send { project: &Entity, cx: &mut App, ) -> Task>>; + + fn into_any(self: Rc) -> Rc; +} + +impl dyn AgentServer { + pub fn downcast(self: Rc) -> Option> { + self.into_any().downcast().ok() + } } impl std::fmt::Debug for AgentServerCommand { @@ -95,7 +104,7 @@ impl AgentServerCommand { cx: &mut AsyncApp, ) -> Option { if let Some(agent_settings) = settings { - return Some(Self { + Some(Self { path: agent_settings.command.path, args: agent_settings .command @@ -104,7 +113,7 @@ impl AgentServerCommand { .chain(extra_args.iter().map(|arg| arg.to_string())) .collect(), env: agent_settings.command.env, - }); + }) } else { match find_bin_in_path(path_bin_name, project, cx).await { Some(path) => Some(Self { diff --git a/crates/agent_servers/src/claude.rs b/crates/agent_servers/src/claude.rs index c65508f1520b9b03d866df606c2272ccf43d8dbd..ef666974f1e6a29345e469d8cf19fe13a3fd02eb 100644 --- a/crates/agent_servers/src/claude.rs +++ b/crates/agent_servers/src/claude.rs @@ -1,19 +1,27 @@ +mod edit_tool; mod mcp_server; +mod permission_tool; +mod read_tool; pub mod tools; +mod write_tool; +use action_log::ActionLog; use collections::HashMap; use context_server::listener::McpServerTool; +use language_models::provider::anthropic::AnthropicLanguageModelProvider; use project::Project; use settings::SettingsStore; use smol::process::Child; +use std::any::Any; use std::cell::RefCell; use std::fmt::Display; -use std::path::Path; +use std::path::{Path, PathBuf}; use std::rc::Rc; +use util::command::new_smol_command; use uuid::Uuid; use agent_client_protocol as acp; -use anyhow::{Result, anyhow}; +use anyhow::{Context as _, Result, anyhow}; use futures::channel::oneshot; use futures::{AsyncBufReadExt, AsyncWriteExt}; use futures::{ @@ -29,7 +37,7 @@ use util::{ResultExt, debug_panic}; use crate::claude::mcp_server::{ClaudeZedMcpServer, McpConfig}; use crate::claude::tools::ClaudeTool; use crate::{AgentServer, AgentServerCommand, AllAgentServersSettings}; -use acp_thread::{AcpThread, 
AgentConnection}; +use acp_thread::{AcpThread, AgentConnection, AuthRequired, LoadError, MentionUri}; #[derive(Clone)] pub struct ClaudeCode; @@ -63,6 +71,10 @@ impl AgentServer for ClaudeCode { Task::ready(Ok(Rc::new(connection) as _)) } + + fn into_any(self: Rc) -> Rc { + self + } } struct ClaudeAgentConnection { @@ -74,12 +86,47 @@ impl AgentConnection for ClaudeAgentConnection { self: Rc, project: Entity, cwd: &Path, - cx: &mut AsyncApp, + cx: &mut App, ) -> Task>> { let cwd = cwd.to_owned(); cx.spawn(async move |cx| { + let settings = cx.read_global(|settings: &SettingsStore, _| { + settings.get::(None).claude.clone() + })?; + + let Some(command) = AgentServerCommand::resolve( + "claude", + &[], + Some(&util::paths::home_dir().join(".claude/local/claude")), + settings, + &project, + cx, + ) + .await + else { + return Err(LoadError::NotInstalled { + error_message: "Failed to find Claude Code binary".into(), + install_message: "Install Claude Code".into(), + install_command: "npm install -g @anthropic-ai/claude-code@latest".into(), + }.into()); + }; + + let api_key = + cx.update(AnthropicLanguageModelProvider::api_key)? + .await + .map_err(|err| { + if err.is::() { + anyhow!(AuthRequired::new().with_language_model_provider( + language_model::ANTHROPIC_PROVIDER_ID + )) + } else { + anyhow!(err) + } + })?; + let (mut thread_tx, thread_rx) = watch::channel(WeakEntity::new_invalid()); - let permission_mcp_server = ClaudeZedMcpServer::new(thread_rx.clone(), cx).await?; + let fs = project.read_with(cx, |project, _cx| project.fs().clone())?; + let permission_mcp_server = ClaudeZedMcpServer::new(thread_rx.clone(), fs, cx).await?; let mut mcp_servers = HashMap::default(); mcp_servers.insert( @@ -97,23 +144,6 @@ impl AgentConnection for ClaudeAgentConnection { .await?; mcp_config_file.flush().await?; - let settings = cx.read_global(|settings: &SettingsStore, _| { - settings.get::(None).claude.clone() - })?; - - let Some(command) = AgentServerCommand::resolve( - "claude", - &[], - Some(&util::paths::home_dir().join(".claude/local/claude")), - settings, - &project, - cx, - ) - .await - else { - anyhow::bail!("Failed to find claude binary"); - }; - let (incoming_message_tx, mut incoming_message_rx) = mpsc::unbounded(); let (outgoing_tx, outgoing_rx) = mpsc::unbounded(); @@ -125,16 +155,30 @@ impl AgentConnection for ClaudeAgentConnection { &command, ClaudeSessionMode::Start, session_id.clone(), + api_key, &mcp_config_path, &cwd, )?; - let stdin = child.stdin.take().unwrap(); - let stdout = child.stdout.take().unwrap(); + let stdout = child.stdout.take().context("Failed to take stdout")?; + let stdin = child.stdin.take().context("Failed to take stdin")?; + let stderr = child.stderr.take().context("Failed to take stderr")?; let pid = child.id(); log::trace!("Spawned (pid: {})", pid); + cx.background_spawn(async move { + let mut stderr = BufReader::new(stderr); + let mut line = String::new(); + while let Ok(n) = stderr.read_line(&mut line).await + && n > 0 + { + log::warn!("agent stderr: {}", &line); + line.clear(); + } + }) + .detach(); + cx.background_spawn(async move { let mut outgoing_rx = Some(outgoing_rx); @@ -169,20 +213,50 @@ impl AgentConnection for ClaudeAgentConnection { .await } - if let Some(status) = child.status().await.log_err() { - if let Some(thread) = thread_rx.recv().await.ok() { - thread - .update(cx, |thread, cx| { - thread.emit_server_exited(status, cx); - }) - .ok(); - } + if let Some(status) = child.status().await.log_err() + && let Some(thread) = thread_rx.recv().await.ok() + 
{ + let version = claude_version(command.path.clone(), cx).await.log_err(); + let help = claude_help(command.path.clone(), cx).await.log_err(); + thread + .update(cx, |thread, cx| { + let error = if let Some(version) = version + && let Some(help) = help + && (!help.contains("--input-format") + || !help.contains("--session-id")) + { + LoadError::Unsupported { + error_message: format!( + "Your installed version of Claude Code ({}, version {}) does not have required features for use with Zed.", + command.path.to_string_lossy(), + version, + ) + .into(), + upgrade_message: "Upgrade Claude Code to latest".into(), + upgrade_command: format!( + "{} update", + command.path.to_string_lossy() + ), + } + } else { + LoadError::Exited { status } + }; + thread.emit_load_error(error, cx); + }) + .ok(); } } }); - let thread = cx.new(|cx| { - AcpThread::new("Claude Code", self.clone(), project, session_id.clone(), cx) + let action_log = cx.new(|_| ActionLog::new(project.clone()))?; + let thread = cx.new(|_cx| { + AcpThread::new( + "Claude Code", + self.clone(), + project, + action_log, + session_id.clone(), + ) })?; thread_tx.send(thread.downgrade())?; @@ -210,6 +284,7 @@ impl AgentConnection for ClaudeAgentConnection { fn prompt( &self, + _id: Option, params: acp::PromptRequest, cx: &mut App, ) -> Task> { @@ -224,27 +299,12 @@ impl AgentConnection for ClaudeAgentConnection { let (end_tx, end_rx) = oneshot::channel(); session.turn_state.replace(TurnState::InProgress { end_tx }); - let mut content = String::new(); - for chunk in params.prompt { - match chunk { - acp::ContentBlock::Text(text_content) => { - content.push_str(&text_content.text); - } - acp::ContentBlock::ResourceLink(resource_link) => { - content.push_str(&format!("@{}", resource_link.uri)); - } - acp::ContentBlock::Audio(_) - | acp::ContentBlock::Image(_) - | acp::ContentBlock::Resource(_) => { - // TODO - } - } - } + let content = acp_content_to_claude(params.prompt); if let Err(err) = session.outgoing_tx.unbounded_send(SdkMessage::User { message: Message { role: Role::User, - content: Content::UntaggedText(content), + content: Content::Chunks(content), id: None, model: None, stop_reason: None, @@ -259,9 +319,17 @@ impl AgentConnection for ClaudeAgentConnection { cx.foreground_executor().spawn(async move { end_rx.await? 
}) } + fn prompt_capabilities(&self) -> acp::PromptCapabilities { + acp::PromptCapabilities { + image: true, + audio: false, + embedded_context: true, + } + } + fn cancel(&self, session_id: &acp::SessionId, _cx: &mut App) { let sessions = self.sessions.borrow(); - let Some(session) = sessions.get(&session_id) else { + let Some(session) = sessions.get(session_id) else { log::warn!("Attempted to cancel nonexistent session {}", session_id); return; }; @@ -270,7 +338,7 @@ impl AgentConnection for ClaudeAgentConnection { let turn_state = session.turn_state.take(); let TurnState::InProgress { end_tx } = turn_state else { - // Already cancelled or idle, put it back + // Already canceled or idle, put it back session.turn_state.replace(turn_state); return; }; @@ -288,6 +356,10 @@ impl AgentConnection for ClaudeAgentConnection { }) .log_err(); } + + fn into_any(self: Rc) -> Rc { + self + } } #[derive(Clone, Copy)] @@ -301,6 +373,7 @@ fn spawn_claude( command: &AgentServerCommand, mode: ClaudeSessionMode, session_id: acp::SessionId, + api_key: language_models::provider::anthropic::ApiKey, mcp_config_path: &Path, root_dir: &Path, ) -> Result { @@ -318,34 +391,55 @@ fn spawn_claude( &format!( "mcp__{}__{}", mcp_server::SERVER_NAME, - mcp_server::PermissionTool::NAME, + permission_tool::PermissionTool::NAME, ), "--allowedTools", &format!( - "mcp__{}__{},mcp__{}__{}", - mcp_server::SERVER_NAME, - mcp_server::EditTool::NAME, + "mcp__{}__{}", mcp_server::SERVER_NAME, - mcp_server::ReadTool::NAME + read_tool::ReadTool::NAME ), "--disallowedTools", - "Read,Edit", + "Read,Write,Edit,MultiEdit", ]) .args(match mode { ClaudeSessionMode::Start => ["--session-id".to_string(), session_id.to_string()], ClaudeSessionMode::Resume => ["--resume".to_string(), session_id.to_string()], }) .args(command.args.iter().map(|arg| arg.as_str())) + .envs(command.env.iter().flatten()) + .env("ANTHROPIC_API_KEY", api_key.key) .current_dir(root_dir) .stdin(std::process::Stdio::piped()) .stdout(std::process::Stdio::piped()) - .stderr(std::process::Stdio::inherit()) + .stderr(std::process::Stdio::piped()) .kill_on_drop(true) .spawn()?; Ok(child) } +fn claude_version(path: PathBuf, cx: &mut AsyncApp) -> Task> { + cx.background_spawn(async move { + let output = new_smol_command(path).arg("--version").output().await?; + let output = String::from_utf8(output.stdout)?; + let version = output + .trim() + .strip_suffix(" (Claude Code)") + .context("parsing Claude version")?; + let version = semver::Version::parse(version)?; + anyhow::Ok(version) + }) +} + +fn claude_help(path: PathBuf, cx: &mut AsyncApp) -> Task> { + cx.background_spawn(async move { + let output = new_smol_command(path).arg("--help").output().await?; + let output = String::from_utf8(output.stdout)?; + anyhow::Ok(output) + }) +} + struct ClaudeAgentSession { outgoing_tx: UnboundedSender, turn_state: Rc>, @@ -370,7 +464,7 @@ enum TurnState { } impl TurnState { - fn is_cancelled(&self) -> bool { + fn is_canceled(&self) -> bool { matches!(self, TurnState::CancelConfirmed { .. 
}) } @@ -420,10 +514,10 @@ impl ClaudeAgentSession { for chunk in message.content.chunks() { match chunk { ContentChunk::Text { text } | ContentChunk::UntaggedText(text) => { - if !turn_state.borrow().is_cancelled() { + if !turn_state.borrow().is_canceled() { thread .update(cx, |thread, cx| { - thread.push_user_content_block(text.into(), cx) + thread.push_user_content_block(None, text.into(), cx) }) .log_err(); } @@ -435,17 +529,24 @@ impl ClaudeAgentSession { let content = content.to_string(); thread .update(cx, |thread, cx| { + let id = acp::ToolCallId(tool_use_id.into()); + let set_new_content = !content.is_empty() + && thread.tool_call(&id).is_none_or(|(_, tool_call)| { + // preserve rich diff if we have one + tool_call.diffs().next().is_none() + }); + thread.update_tool_call( acp::ToolCallUpdate { - id: acp::ToolCallId(tool_use_id.into()), + id, fields: acp::ToolCallUpdateFields { - status: if turn_state.borrow().is_cancelled() { - // Do not set to completed if turn was cancelled + status: if turn_state.borrow().is_canceled() { + // Do not set to completed if turn was canceled None } else { Some(acp::ToolCallStatus::Completed) }, - content: (!content.is_empty()) + content: set_new_content .then(|| vec![content.into()]), ..Default::default() }, @@ -463,10 +564,17 @@ impl ClaudeAgentSession { chunk ); } + ContentChunk::Image { source } => { + if !turn_state.borrow().is_canceled() { + thread + .update(cx, |thread, cx| { + thread.push_user_content_block(None, source.into(), cx) + }) + .log_err(); + } + } - ContentChunk::Image - | ContentChunk::Document - | ContentChunk::WebSearchToolResult => { + ContentChunk::Document | ContentChunk::WebSearchToolResult => { thread .update(cx, |thread, cx| { thread.push_assistant_content_block( @@ -541,8 +649,9 @@ impl ClaudeAgentSession { thread.upsert_tool_call( claude_tool.as_acp(acp::ToolCallId(id.into())), cx, - ); + )?; } + anyhow::Ok(()) }) .log_err(); } @@ -551,7 +660,14 @@ impl ClaudeAgentSession { "Should not get tool results with role: assistant. should we handle this?" ); } - ContentChunk::Image | ContentChunk::Document => { + ContentChunk::Image { source } => { + thread + .update(cx, |thread, cx| { + thread.push_assistant_content_block(source.into(), false, cx) + }) + .log_err(); + } + ContentChunk::Document => { thread .update(cx, |thread, cx| { thread.push_assistant_content_block( @@ -572,14 +688,13 @@ impl ClaudeAgentSession { .. 
} => { let turn_state = turn_state.take(); - let was_cancelled = turn_state.is_cancelled(); + let was_canceled = turn_state.is_canceled(); let Some(end_turn_tx) = turn_state.end_tx() else { debug_panic!("Received `SdkMessage::Result` but there wasn't an active turn"); return; }; - if is_error || (!was_cancelled && subtype == ResultErrorType::ErrorDuringExecution) - { + if is_error || (!was_canceled && subtype == ResultErrorType::ErrorDuringExecution) { end_turn_tx .send(Err(anyhow!( "Error: {}", @@ -680,7 +795,7 @@ impl Content { pub fn chunks(self) -> impl Iterator<Item = ContentChunk> { match self { Self::Chunks(chunks) => chunks.into_iter(), - Self::UntaggedText(text) => vec![ContentChunk::Text { text: text.clone() }].into_iter(), + Self::UntaggedText(text) => vec![ContentChunk::Text { text }].into_iter(), } } } @@ -718,14 +833,44 @@ enum ContentChunk { thinking: String, }, RedactedThinking, + Image { + source: ImageSource, + }, // TODO - Image, Document, WebSearchToolResult, #[serde(untagged)] UntaggedText(String), } +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "type", rename_all = "snake_case")] +enum ImageSource { + Base64 { data: String, media_type: String }, + Url { url: String }, +} + +impl Into<acp::ContentBlock> for ImageSource { + fn into(self) -> acp::ContentBlock { + match self { + ImageSource::Base64 { data, media_type } => { + acp::ContentBlock::Image(acp::ImageContent { + annotations: None, + data, + mime_type: media_type, + uri: None, + }) + } + ImageSource::Url { url } => acp::ContentBlock::Image(acp::ImageContent { + annotations: None, + data: "".to_string(), + mime_type: "".to_string(), + uri: Some(url), + }), + } + } +} + impl Display for ContentChunk { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { @@ -734,7 +879,7 @@ impl Display for ContentChunk { ContentChunk::RedactedThinking => write!(f, "Thinking: [REDACTED]"), ContentChunk::UntaggedText(text) => write!(f, "{}", text), ContentChunk::ToolResult { content, .. } => write!(f, "{}", content), - ContentChunk::Image + ContentChunk::Image { .. } | ContentChunk::Document | ContentChunk::ToolUse { .. } | ContentChunk::WebSearchToolResult => { @@ -846,6 +991,75 @@ impl Display for ResultErrorType { } } +fn acp_content_to_claude(prompt: Vec<acp::ContentBlock>) -> Vec<ContentChunk> { + let mut content = Vec::with_capacity(prompt.len()); + let mut context = Vec::with_capacity(prompt.len()); + + for chunk in prompt { + match chunk { + acp::ContentBlock::Text(text_content) => { + content.push(ContentChunk::Text { + text: text_content.text, + }); + } + acp::ContentBlock::ResourceLink(resource_link) => { + match MentionUri::parse(&resource_link.uri) { + Ok(uri) => { + content.push(ContentChunk::Text { + text: format!("{}", uri.as_link()), + }); + } + Err(_) => { + content.push(ContentChunk::Text { + text: resource_link.uri, + }); + } + } + } + acp::ContentBlock::Resource(resource) => match resource.resource { + acp::EmbeddedResourceResource::TextResourceContents(resource) => { + match MentionUri::parse(&resource.uri) { + Ok(uri) => { + content.push(ContentChunk::Text { + text: format!("{}", uri.as_link()), + }); + } + Err(_) => { + content.push(ContentChunk::Text { + text: resource.uri.clone(), + }); + } + } + + context.push(ContentChunk::Text { + text: format!( + "<context ref=\"{}\">\n{}\n</context>", + resource.uri, resource.text + ), + }); + } + acp::EmbeddedResourceResource::BlobResourceContents(_) => { + // Unsupported by SDK + } + }, + acp::ContentBlock::Image(acp::ImageContent { + data, mime_type, ..
+ }) => content.push(ContentChunk::Image { + source: ImageSource::Base64 { + data, + media_type: mime_type, + }, + }), + acp::ContentBlock::Audio(_) => { + // Unsupported by SDK + } + } + } + + content.extend(context); + content +} + fn new_request_id() -> String { use rand::Rng; // In the Claude Code TS SDK they just generate a random 12 character string, @@ -879,7 +1093,7 @@ pub(crate) mod tests { use gpui::TestAppContext; use serde_json::json; - crate::common_e2e_tests!(ClaudeCode, allow_option_id = "allow"); + crate::common_e2e_tests!(async |_, _, _| ClaudeCode, allow_option_id = "allow"); pub fn local_command() -> AgentServerCommand { AgentServerCommand { @@ -911,7 +1125,7 @@ pub(crate) mod tests { thread.read_with(cx, |thread, _| { entries_len = thread.plan().entries.len(); - assert!(thread.plan().entries.len() > 0, "Empty plan"); + assert!(!thread.plan().entries.is_empty(), "Empty plan"); }); thread @@ -1062,4 +1276,100 @@ _ => panic!("Expected ToolResult variant"), } } + + #[test] + fn test_acp_content_to_claude() { + let acp_content = vec![ + acp::ContentBlock::Text(acp::TextContent { + text: "Hello world".to_string(), + annotations: None, + }), + acp::ContentBlock::Image(acp::ImageContent { + data: "base64data".to_string(), + mime_type: "image/png".to_string(), + annotations: None, + uri: None, + }), + acp::ContentBlock::ResourceLink(acp::ResourceLink { + uri: "file:///path/to/example.rs".to_string(), + name: "example.rs".to_string(), + annotations: None, + description: None, + mime_type: None, + size: None, + title: None, + }), + acp::ContentBlock::Resource(acp::EmbeddedResource { + annotations: None, + resource: acp::EmbeddedResourceResource::TextResourceContents( + acp::TextResourceContents { + mime_type: None, + text: "fn main() { println!(\"Hello!\"); }".to_string(), + uri: "file:///path/to/code.rs".to_string(), + }, + ), + }), + acp::ContentBlock::ResourceLink(acp::ResourceLink { + uri: "invalid_uri_format".to_string(), + name: "invalid.txt".to_string(), + annotations: None, + description: None, + mime_type: None, + size: None, + title: None, + }), + ]; + + let claude_content = acp_content_to_claude(acp_content); + + assert_eq!(claude_content.len(), 6); + + match &claude_content[0] { + ContentChunk::Text { text } => assert_eq!(text, "Hello world"), + _ => panic!("Expected Text chunk"), + } + + match &claude_content[1] { + ContentChunk::Image { source } => match source { + ImageSource::Base64 { data, media_type } => { + assert_eq!(data, "base64data"); + assert_eq!(media_type, "image/png"); + } + _ => panic!("Expected Base64 image source"), + }, + _ => panic!("Expected Image chunk"), + } + + match &claude_content[2] { + ContentChunk::Text { text } => { + assert!(text.contains("example.rs")); + assert!(text.contains("file:///path/to/example.rs")); + } + _ => panic!("Expected Text chunk for ResourceLink"), + } + + match &claude_content[3] { + ContentChunk::Text { text } => { + assert!(text.contains("code.rs")); + assert!(text.contains("file:///path/to/code.rs")); + } + _ => panic!("Expected Text chunk for Resource"), + } + + match &claude_content[4] { + ContentChunk::Text { text } => { + assert_eq!(text, "invalid_uri_format"); + } + _ => panic!("Expected Text chunk for invalid URI"), + } + + match &claude_content[5] { + ContentChunk::Text { text } => { + assert!(text.contains("<context ref=\"file:///path/to/code.rs\">")); + assert!(text.contains("fn main() { println!(\"Hello!\"); }")); + assert!(text.contains("</context>")); + } + _ => panic!("Expected Text chunk for context"), + } + } } diff --git 
a/crates/agent_servers/src/claude/edit_tool.rs b/crates/agent_servers/src/claude/edit_tool.rs new file mode 100644 index 0000000000000000000000000000000000000000..a8d93c3f3d5579173709b3ace6194059745885a7 --- /dev/null +++ b/crates/agent_servers/src/claude/edit_tool.rs @@ -0,0 +1,178 @@ +use acp_thread::AcpThread; +use anyhow::Result; +use context_server::{ + listener::{McpServerTool, ToolResponse}, + types::{ToolAnnotations, ToolResponseContent}, +}; +use gpui::{AsyncApp, WeakEntity}; +use language::unified_diff; +use util::markdown::MarkdownCodeBlock; + +use crate::tools::EditToolParams; + +#[derive(Clone)] +pub struct EditTool { + thread_rx: watch::Receiver>, +} + +impl EditTool { + pub fn new(thread_rx: watch::Receiver>) -> Self { + Self { thread_rx } + } +} + +impl McpServerTool for EditTool { + type Input = EditToolParams; + type Output = (); + + const NAME: &'static str = "Edit"; + + fn annotations(&self) -> ToolAnnotations { + ToolAnnotations { + title: Some("Edit file".to_string()), + read_only_hint: Some(false), + destructive_hint: Some(false), + open_world_hint: Some(false), + idempotent_hint: Some(false), + } + } + + async fn run( + &self, + input: Self::Input, + cx: &mut AsyncApp, + ) -> Result> { + let mut thread_rx = self.thread_rx.clone(); + let Some(thread) = thread_rx.recv().await?.upgrade() else { + anyhow::bail!("Thread closed"); + }; + + let content = thread + .update(cx, |thread, cx| { + thread.read_text_file(input.abs_path.clone(), None, None, true, cx) + })? + .await?; + + let (new_content, diff) = cx + .background_executor() + .spawn(async move { + let new_content = content.replace(&input.old_text, &input.new_text); + if new_content == content { + return Err(anyhow::anyhow!("Failed to find `old_text`",)); + } + let diff = unified_diff(&content, &new_content); + + Ok((new_content, diff)) + }) + .await?; + + thread + .update(cx, |thread, cx| { + thread.write_text_file(input.abs_path, new_content, cx) + })? + .await?; + + Ok(ToolResponse { + content: vec![ToolResponseContent::Text { + text: MarkdownCodeBlock { + tag: "diff", + text: diff.as_str().trim_end_matches('\n'), + } + .to_string(), + }], + structured_content: (), + }) + } +} + +#[cfg(test)] +mod tests { + use std::rc::Rc; + + use acp_thread::{AgentConnection, StubAgentConnection}; + use gpui::{Entity, TestAppContext}; + use indoc::indoc; + use project::{FakeFs, Project}; + use serde_json::json; + use settings::SettingsStore; + use util::path; + + use super::*; + + #[gpui::test] + async fn old_text_not_found(cx: &mut TestAppContext) { + let (_thread, tool) = init_test(cx).await; + + let result = tool + .run( + EditToolParams { + abs_path: path!("/root/file.txt").into(), + old_text: "hi".into(), + new_text: "bye".into(), + }, + &mut cx.to_async(), + ) + .await; + + assert_eq!(result.unwrap_err().to_string(), "Failed to find `old_text`"); + } + + #[gpui::test] + async fn found_and_replaced(cx: &mut TestAppContext) { + let (_thread, tool) = init_test(cx).await; + + let result = tool + .run( + EditToolParams { + abs_path: path!("/root/file.txt").into(), + old_text: "hello".into(), + new_text: "hi".into(), + }, + &mut cx.to_async(), + ) + .await; + + assert_eq!( + result.unwrap().content[0].text().unwrap(), + indoc! 
{ + r" + ```diff + @@ -1,1 +1,1 @@ + -hello + +hi + ``` + " + } + ); + } + + async fn init_test(cx: &mut TestAppContext) -> (Entity, EditTool) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + language::init(cx); + Project::init_settings(cx); + }); + + let connection = Rc::new(StubAgentConnection::new()); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/root"), + json!({ + "file.txt": "hello" + }), + ) + .await; + let project = Project::test(fs, [path!("/root").as_ref()], cx).await; + let (mut thread_tx, thread_rx) = watch::channel(WeakEntity::new_invalid()); + + let thread = cx + .update(|cx| connection.new_thread(project, path!("/test").as_ref(), cx)) + .await + .unwrap(); + + thread_tx.send(thread.downgrade()).unwrap(); + + (thread, EditTool::new(thread_rx)) + } +} diff --git a/crates/agent_servers/src/claude/mcp_server.rs b/crates/agent_servers/src/claude/mcp_server.rs index 53a8556e74545bc339936d0b2f9f78444190af0c..6442c784b59a655def92bcae108c27c503daa630 100644 --- a/crates/agent_servers/src/claude/mcp_server.rs +++ b/crates/agent_servers/src/claude/mcp_server.rs @@ -1,18 +1,22 @@ use std::path::PathBuf; +use std::sync::Arc; -use crate::claude::tools::{ClaudeTool, EditToolParams, ReadToolParams}; +use crate::claude::edit_tool::EditTool; +use crate::claude::permission_tool::PermissionTool; +use crate::claude::read_tool::ReadTool; +use crate::claude::write_tool::WriteTool; use acp_thread::AcpThread; -use agent_client_protocol as acp; -use anyhow::{Context, Result}; +#[cfg(not(test))] +use anyhow::Context as _; +use anyhow::Result; use collections::HashMap; -use context_server::listener::{McpServerTool, ToolResponse}; use context_server::types::{ Implementation, InitializeParams, InitializeResponse, ProtocolVersion, ServerCapabilities, - ToolAnnotations, ToolResponseContent, ToolsCapabilities, requests, + ToolsCapabilities, requests, }; use gpui::{App, AsyncApp, Task, WeakEntity}; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; +use project::Fs; +use serde::Serialize; pub struct ClaudeZedMcpServer { server: context_server::listener::McpServer, @@ -23,20 +27,16 @@ pub const SERVER_NAME: &str = "zed"; impl ClaudeZedMcpServer { pub async fn new( thread_rx: watch::Receiver>, + fs: Arc, cx: &AsyncApp, ) -> Result { let mut mcp_server = context_server::listener::McpServer::new(cx).await?; mcp_server.handle_request::(Self::handle_initialize); - mcp_server.add_tool(PermissionTool { - thread_rx: thread_rx.clone(), - }); - mcp_server.add_tool(ReadTool { - thread_rx: thread_rx.clone(), - }); - mcp_server.add_tool(EditTool { - thread_rx: thread_rx.clone(), - }); + mcp_server.add_tool(PermissionTool::new(fs.clone(), thread_rx.clone())); + mcp_server.add_tool(ReadTool::new(thread_rx.clone())); + mcp_server.add_tool(EditTool::new(thread_rx.clone())); + mcp_server.add_tool(WriteTool::new(thread_rx.clone())); Ok(Self { server: mcp_server }) } @@ -97,206 +97,3 @@ pub struct McpServerConfig { #[serde(skip_serializing_if = "Option::is_none")] pub env: Option>, } - -// Tools - -#[derive(Clone)] -pub struct PermissionTool { - thread_rx: watch::Receiver>, -} - -#[derive(Deserialize, JsonSchema, Debug)] -pub struct PermissionToolParams { - tool_name: String, - input: serde_json::Value, - tool_use_id: Option, -} - -#[derive(Serialize)] -#[serde(rename_all = "camelCase")] -pub struct PermissionToolResponse { - behavior: PermissionToolBehavior, - updated_input: serde_json::Value, -} - -#[derive(Serialize)] -#[serde(rename_all 
= "snake_case")] -enum PermissionToolBehavior { - Allow, - Deny, -} - -impl McpServerTool for PermissionTool { - type Input = PermissionToolParams; - type Output = (); - - const NAME: &'static str = "Confirmation"; - - fn description(&self) -> &'static str { - "Request permission for tool calls" - } - - async fn run( - &self, - input: Self::Input, - cx: &mut AsyncApp, - ) -> Result> { - let mut thread_rx = self.thread_rx.clone(); - let Some(thread) = thread_rx.recv().await?.upgrade() else { - anyhow::bail!("Thread closed"); - }; - - let claude_tool = ClaudeTool::infer(&input.tool_name, input.input.clone()); - let tool_call_id = acp::ToolCallId(input.tool_use_id.context("Tool ID required")?.into()); - let allow_option_id = acp::PermissionOptionId("allow".into()); - let reject_option_id = acp::PermissionOptionId("reject".into()); - - let chosen_option = thread - .update(cx, |thread, cx| { - thread.request_tool_call_authorization( - claude_tool.as_acp(tool_call_id), - vec![ - acp::PermissionOption { - id: allow_option_id.clone(), - name: "Allow".into(), - kind: acp::PermissionOptionKind::AllowOnce, - }, - acp::PermissionOption { - id: reject_option_id.clone(), - name: "Reject".into(), - kind: acp::PermissionOptionKind::RejectOnce, - }, - ], - cx, - ) - })? - .await?; - - let response = if chosen_option == allow_option_id { - PermissionToolResponse { - behavior: PermissionToolBehavior::Allow, - updated_input: input.input, - } - } else { - debug_assert_eq!(chosen_option, reject_option_id); - PermissionToolResponse { - behavior: PermissionToolBehavior::Deny, - updated_input: input.input, - } - }; - - Ok(ToolResponse { - content: vec![ToolResponseContent::Text { - text: serde_json::to_string(&response)?, - }], - structured_content: (), - }) - } -} - -#[derive(Clone)] -pub struct ReadTool { - thread_rx: watch::Receiver>, -} - -impl McpServerTool for ReadTool { - type Input = ReadToolParams; - type Output = (); - - const NAME: &'static str = "Read"; - - fn description(&self) -> &'static str { - "Read the contents of a file. In sessions with mcp__zed__Read always use it instead of Read as it contains the most up-to-date contents." - } - - fn annotations(&self) -> ToolAnnotations { - ToolAnnotations { - title: Some("Read file".to_string()), - read_only_hint: Some(true), - destructive_hint: Some(false), - open_world_hint: Some(false), - idempotent_hint: None, - } - } - - async fn run( - &self, - input: Self::Input, - cx: &mut AsyncApp, - ) -> Result> { - let mut thread_rx = self.thread_rx.clone(); - let Some(thread) = thread_rx.recv().await?.upgrade() else { - anyhow::bail!("Thread closed"); - }; - - let content = thread - .update(cx, |thread, cx| { - thread.read_text_file(input.abs_path, input.offset, input.limit, false, cx) - })? - .await?; - - Ok(ToolResponse { - content: vec![ToolResponseContent::Text { text: content }], - structured_content: (), - }) - } -} - -#[derive(Clone)] -pub struct EditTool { - thread_rx: watch::Receiver>, -} - -impl McpServerTool for EditTool { - type Input = EditToolParams; - type Output = (); - - const NAME: &'static str = "Edit"; - - fn description(&self) -> &'static str { - "Edits a file. In sessions with mcp__zed__Edit always use it instead of Edit as it will show the diff to the user better." 
- } - - fn annotations(&self) -> ToolAnnotations { - ToolAnnotations { - title: Some("Edit file".to_string()), - read_only_hint: Some(false), - destructive_hint: Some(false), - open_world_hint: Some(false), - idempotent_hint: Some(false), - } - } - - async fn run( - &self, - input: Self::Input, - cx: &mut AsyncApp, - ) -> Result> { - let mut thread_rx = self.thread_rx.clone(); - let Some(thread) = thread_rx.recv().await?.upgrade() else { - anyhow::bail!("Thread closed"); - }; - - let content = thread - .update(cx, |thread, cx| { - thread.read_text_file(input.abs_path.clone(), None, None, true, cx) - })? - .await?; - - let new_content = content.replace(&input.old_text, &input.new_text); - if new_content == content { - return Err(anyhow::anyhow!("The old_text was not found in the content")); - } - - thread - .update(cx, |thread, cx| { - thread.write_text_file(input.abs_path, new_content, cx) - })? - .await?; - - Ok(ToolResponse { - content: vec![], - structured_content: (), - }) - } -} diff --git a/crates/agent_servers/src/claude/permission_tool.rs b/crates/agent_servers/src/claude/permission_tool.rs new file mode 100644 index 0000000000000000000000000000000000000000..96a24105e87bd99b46ec16e39dc32df57557f882 --- /dev/null +++ b/crates/agent_servers/src/claude/permission_tool.rs @@ -0,0 +1,158 @@ +use std::sync::Arc; + +use acp_thread::AcpThread; +use agent_client_protocol as acp; +use agent_settings::AgentSettings; +use anyhow::{Context as _, Result}; +use context_server::{ + listener::{McpServerTool, ToolResponse}, + types::ToolResponseContent, +}; +use gpui::{AsyncApp, WeakEntity}; +use project::Fs; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use settings::{Settings as _, update_settings_file}; +use util::debug_panic; + +use crate::tools::ClaudeTool; + +#[derive(Clone)] +pub struct PermissionTool { + fs: Arc, + thread_rx: watch::Receiver>, +} + +/// Request permission for tool calls +#[derive(Deserialize, JsonSchema, Debug)] +pub struct PermissionToolParams { + tool_name: String, + input: serde_json::Value, + tool_use_id: Option, +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +pub struct PermissionToolResponse { + behavior: PermissionToolBehavior, + updated_input: serde_json::Value, +} + +#[derive(Serialize)] +#[serde(rename_all = "snake_case")] +enum PermissionToolBehavior { + Allow, + Deny, +} + +impl PermissionTool { + pub fn new(fs: Arc, thread_rx: watch::Receiver>) -> Self { + Self { fs, thread_rx } + } +} + +impl McpServerTool for PermissionTool { + type Input = PermissionToolParams; + type Output = (); + + const NAME: &'static str = "Confirmation"; + + async fn run( + &self, + input: Self::Input, + cx: &mut AsyncApp, + ) -> Result> { + if agent_settings::AgentSettings::try_read_global(cx, |settings| { + settings.always_allow_tool_actions + }) + .unwrap_or(false) + { + let response = PermissionToolResponse { + behavior: PermissionToolBehavior::Allow, + updated_input: input.input, + }; + + return Ok(ToolResponse { + content: vec![ToolResponseContent::Text { + text: serde_json::to_string(&response)?, + }], + structured_content: (), + }); + } + + let mut thread_rx = self.thread_rx.clone(); + let Some(thread) = thread_rx.recv().await?.upgrade() else { + anyhow::bail!("Thread closed"); + }; + + let claude_tool = ClaudeTool::infer(&input.tool_name, input.input.clone()); + let tool_call_id = acp::ToolCallId(input.tool_use_id.context("Tool ID required")?.into()); + + const ALWAYS_ALLOW: &str = "always_allow"; + const ALLOW: &str = "allow"; + const 
REJECT: &str = "reject"; + + let chosen_option = thread + .update(cx, |thread, cx| { + thread.request_tool_call_authorization( + claude_tool.as_acp(tool_call_id).into(), + vec![ + acp::PermissionOption { + id: acp::PermissionOptionId(ALWAYS_ALLOW.into()), + name: "Always Allow".into(), + kind: acp::PermissionOptionKind::AllowAlways, + }, + acp::PermissionOption { + id: acp::PermissionOptionId(ALLOW.into()), + name: "Allow".into(), + kind: acp::PermissionOptionKind::AllowOnce, + }, + acp::PermissionOption { + id: acp::PermissionOptionId(REJECT.into()), + name: "Reject".into(), + kind: acp::PermissionOptionKind::RejectOnce, + }, + ], + cx, + ) + })?? + .await?; + + let response = match chosen_option.0.as_ref() { + ALWAYS_ALLOW => { + cx.update(|cx| { + update_settings_file::(self.fs.clone(), cx, |settings, _| { + settings.set_always_allow_tool_actions(true); + }); + })?; + + PermissionToolResponse { + behavior: PermissionToolBehavior::Allow, + updated_input: input.input, + } + } + ALLOW => PermissionToolResponse { + behavior: PermissionToolBehavior::Allow, + updated_input: input.input, + }, + REJECT => PermissionToolResponse { + behavior: PermissionToolBehavior::Deny, + updated_input: input.input, + }, + opt => { + debug_panic!("Unexpected option: {}", opt); + PermissionToolResponse { + behavior: PermissionToolBehavior::Deny, + updated_input: input.input, + } + } + }; + + Ok(ToolResponse { + content: vec![ToolResponseContent::Text { + text: serde_json::to_string(&response)?, + }], + structured_content: (), + }) + } +} diff --git a/crates/agent_servers/src/claude/read_tool.rs b/crates/agent_servers/src/claude/read_tool.rs new file mode 100644 index 0000000000000000000000000000000000000000..cbe25876b3deabc32d1d30f7d8ad4de90fb21494 --- /dev/null +++ b/crates/agent_servers/src/claude/read_tool.rs @@ -0,0 +1,59 @@ +use acp_thread::AcpThread; +use anyhow::Result; +use context_server::{ + listener::{McpServerTool, ToolResponse}, + types::{ToolAnnotations, ToolResponseContent}, +}; +use gpui::{AsyncApp, WeakEntity}; + +use crate::tools::ReadToolParams; + +#[derive(Clone)] +pub struct ReadTool { + thread_rx: watch::Receiver>, +} + +impl ReadTool { + pub fn new(thread_rx: watch::Receiver>) -> Self { + Self { thread_rx } + } +} + +impl McpServerTool for ReadTool { + type Input = ReadToolParams; + type Output = (); + + const NAME: &'static str = "Read"; + + fn annotations(&self) -> ToolAnnotations { + ToolAnnotations { + title: Some("Read file".to_string()), + read_only_hint: Some(true), + destructive_hint: Some(false), + open_world_hint: Some(false), + idempotent_hint: None, + } + } + + async fn run( + &self, + input: Self::Input, + cx: &mut AsyncApp, + ) -> Result> { + let mut thread_rx = self.thread_rx.clone(); + let Some(thread) = thread_rx.recv().await?.upgrade() else { + anyhow::bail!("Thread closed"); + }; + + let content = thread + .update(cx, |thread, cx| { + thread.read_text_file(input.abs_path, input.offset, input.limit, false, cx) + })? 
+ .await?; + + Ok(ToolResponse { + content: vec![ToolResponseContent::Text { text: content }], + structured_content: (), + }) + } +} diff --git a/crates/agent_servers/src/claude/tools.rs b/crates/agent_servers/src/claude/tools.rs index 7ca150c0bd0b30b958a4791db9d01684d16460d6..323190300131c87a443b95e4bb18424cf034e667 100644 --- a/crates/agent_servers/src/claude/tools.rs +++ b/crates/agent_servers/src/claude/tools.rs @@ -34,6 +34,7 @@ impl ClaudeTool { // Known tools "mcp__zed__Read" => Self::ReadFile(serde_json::from_value(input).log_err()), "mcp__zed__Edit" => Self::Edit(serde_json::from_value(input).log_err()), + "mcp__zed__Write" => Self::Write(serde_json::from_value(input).log_err()), "MultiEdit" => Self::MultiEdit(serde_json::from_value(input).log_err()), "Write" => Self::Write(serde_json::from_value(input).log_err()), "LS" => Self::Ls(serde_json::from_value(input).log_err()), @@ -57,7 +58,7 @@ impl ClaudeTool { Self::Terminal(None) } else { Self::Other { - name: tool_name.to_string(), + name: tool_name, input, } } @@ -93,7 +94,7 @@ impl ClaudeTool { } Self::MultiEdit(None) => "Multi Edit".into(), Self::Write(Some(params)) => { - format!("Write {}", params.file_path.display()) + format!("Write {}", params.abs_path.display()) } Self::Write(None) => "Write".into(), Self::Glob(Some(params)) => { @@ -153,7 +154,7 @@ impl ClaudeTool { }], Self::Write(Some(params)) => vec![acp::ToolCallContent::Diff { diff: acp::Diff { - path: params.file_path.clone(), + path: params.abs_path.clone(), old_text: None, new_text: params.content.clone(), }, @@ -229,7 +230,10 @@ impl ClaudeTool { line: None, }] } - Self::Write(Some(WriteToolParams { file_path, .. })) => { + Self::Write(Some(WriteToolParams { + abs_path: file_path, + .. + })) => { vec![acp::ToolCallLocation { path: file_path.clone(), line: None, @@ -302,6 +306,20 @@ impl ClaudeTool { } } +/// Edit a file. +/// +/// In sessions with mcp__zed__Edit always use it instead of Edit as it will +/// allow the user to conveniently review changes. +/// +/// File editing instructions: +/// - The `old_text` param must match existing file content, including indentation. +/// - The `old_text` param must come from the actual file, not an outline. +/// - The `old_text` section must not be empty. +/// - Be minimal with replacements: +/// - For unique lines, include only those lines. +/// - For non-unique lines, include enough context to identify them. +/// - Do not escape quotes, newlines, or other characters. +/// - Only edit the specified file. #[derive(Deserialize, JsonSchema, Debug)] pub struct EditToolParams { /// The absolute path to the file to read. @@ -312,6 +330,11 @@ pub struct EditToolParams { pub new_text: String, } +/// Reads the content of the given file in the project. +/// +/// Never attempt to read a path that hasn't been previously mentioned. +/// +/// In sessions with mcp__zed__Read always use it instead of Read as it contains the most up-to-date contents. #[derive(Deserialize, JsonSchema, Debug)] pub struct ReadToolParams { /// The absolute path to the file to read. @@ -324,11 +347,15 @@ pub struct ReadToolParams { pub limit: Option, } +/// Writes content to the specified file in the project. +/// +/// In sessions with mcp__zed__Write always use it instead of Write as it will +/// allow the user to conveniently review changes. #[derive(Deserialize, JsonSchema, Debug)] pub struct WriteToolParams { - /// Absolute path for new file - pub file_path: PathBuf, - /// File content + /// The absolute path of the file to write. 
+ pub abs_path: PathBuf, + /// The full content to write. pub content: String, } diff --git a/crates/agent_servers/src/claude/write_tool.rs b/crates/agent_servers/src/claude/write_tool.rs new file mode 100644 index 0000000000000000000000000000000000000000..39479a9c38ba616b3d0f2e4197c112bcbea68261 --- /dev/null +++ b/crates/agent_servers/src/claude/write_tool.rs @@ -0,0 +1,59 @@ +use acp_thread::AcpThread; +use anyhow::Result; +use context_server::{ + listener::{McpServerTool, ToolResponse}, + types::ToolAnnotations, +}; +use gpui::{AsyncApp, WeakEntity}; + +use crate::tools::WriteToolParams; + +#[derive(Clone)] +pub struct WriteTool { + thread_rx: watch::Receiver>, +} + +impl WriteTool { + pub fn new(thread_rx: watch::Receiver>) -> Self { + Self { thread_rx } + } +} + +impl McpServerTool for WriteTool { + type Input = WriteToolParams; + type Output = (); + + const NAME: &'static str = "Write"; + + fn annotations(&self) -> ToolAnnotations { + ToolAnnotations { + title: Some("Write file".to_string()), + read_only_hint: Some(false), + destructive_hint: Some(false), + open_world_hint: Some(false), + idempotent_hint: Some(false), + } + } + + async fn run( + &self, + input: Self::Input, + cx: &mut AsyncApp, + ) -> Result> { + let mut thread_rx = self.thread_rx.clone(); + let Some(thread) = thread_rx.recv().await?.upgrade() else { + anyhow::bail!("Thread closed"); + }; + + thread + .update(cx, |thread, cx| { + thread.write_text_file(input.abs_path, input.content, cx) + })? + .await?; + + Ok(ToolResponse { + content: vec![], + structured_content: (), + }) + } +} diff --git a/crates/agent_servers/src/e2e_tests.rs b/crates/agent_servers/src/e2e_tests.rs index ec6ca29b9dd1a902708a8786ddc6853955da5532..c2710790719251980bc39bb9367a62fdc3cee4a3 100644 --- a/crates/agent_servers/src/e2e_tests.rs +++ b/crates/agent_servers/src/e2e_tests.rs @@ -4,21 +4,30 @@ use std::{ time::Duration, }; -use crate::{AgentServer, AgentServerSettings, AllAgentServersSettings}; +use crate::AgentServer; use acp_thread::{AcpThread, AgentThreadEntry, ToolCall, ToolCallStatus}; use agent_client_protocol as acp; use futures::{FutureExt, StreamExt, channel::mpsc, select}; -use gpui::{Entity, TestAppContext}; +use gpui::{AppContext, Entity, TestAppContext}; use indoc::indoc; use project::{FakeFs, Project}; -use settings::{Settings, SettingsStore}; use util::path; -pub async fn test_basic(server: impl AgentServer + 'static, cx: &mut TestAppContext) { - let fs = init_test(cx).await; - let project = Project::test(fs, [], cx).await; - let thread = new_test_thread(server, project.clone(), "/private/tmp", cx).await; +pub async fn test_basic(server: F, cx: &mut TestAppContext) +where + T: AgentServer + 'static, + F: AsyncFn(&Arc, &Entity, &mut TestAppContext) -> T, +{ + let fs = init_test(cx).await as Arc; + let project = Project::test(fs.clone(), [], cx).await; + let thread = new_test_thread( + server(&fs, &project, cx).await, + project.clone(), + "/private/tmp", + cx, + ) + .await; thread .update(cx, |thread, cx| thread.send_raw("Hello from Zed!", cx)) @@ -42,8 +51,12 @@ pub async fn test_basic(server: impl AgentServer + 'static, cx: &mut TestAppCont }); } -pub async fn test_path_mentions(server: impl AgentServer + 'static, cx: &mut TestAppContext) { - let _fs = init_test(cx).await; +pub async fn test_path_mentions(server: F, cx: &mut TestAppContext) +where + T: AgentServer + 'static, + F: AsyncFn(&Arc, &Entity, &mut TestAppContext) -> T, +{ + let fs = init_test(cx).await as _; let tempdir = tempfile::tempdir().unwrap(); std::fs::write( 
@@ -56,7 +69,13 @@ pub async fn test_path_mentions(server: impl AgentServer + 'static, cx: &mut Tes ) .expect("failed to write file"); let project = Project::example([tempdir.path()], &mut cx.to_async()).await; - let thread = new_test_thread(server, project.clone(), tempdir.path(), cx).await; + let thread = new_test_thread( + server(&fs, &project, cx).await, + project.clone(), + tempdir.path(), + cx, + ) + .await; thread .update(cx, |thread, cx| { thread.send( @@ -110,15 +129,25 @@ pub async fn test_path_mentions(server: impl AgentServer + 'static, cx: &mut Tes drop(tempdir); } -pub async fn test_tool_call(server: impl AgentServer + 'static, cx: &mut TestAppContext) { - let _fs = init_test(cx).await; +pub async fn test_tool_call(server: F, cx: &mut TestAppContext) +where + T: AgentServer + 'static, + F: AsyncFn(&Arc, &Entity, &mut TestAppContext) -> T, +{ + let fs = init_test(cx).await as _; let tempdir = tempfile::tempdir().unwrap(); let foo_path = tempdir.path().join("foo"); std::fs::write(&foo_path, "Lorem ipsum dolor").expect("failed to write file"); let project = Project::example([tempdir.path()], &mut cx.to_async()).await; - let thread = new_test_thread(server, project.clone(), "/private/tmp", cx).await; + let thread = new_test_thread( + server(&fs, &project, cx).await, + project.clone(), + "/private/tmp", + cx, + ) + .await; thread .update(cx, |thread, cx| { @@ -134,7 +163,9 @@ pub async fn test_tool_call(server: impl AgentServer + 'static, cx: &mut TestApp matches!( entry, AgentThreadEntry::ToolCall(ToolCall { - status: ToolCallStatus::Allowed { .. }, + status: ToolCallStatus::Pending + | ToolCallStatus::InProgress + | ToolCallStatus::Completed, .. }) ) @@ -150,14 +181,23 @@ pub async fn test_tool_call(server: impl AgentServer + 'static, cx: &mut TestApp drop(tempdir); } -pub async fn test_tool_call_with_permission( - server: impl AgentServer + 'static, +pub async fn test_tool_call_with_permission( + server: F, allow_option_id: acp::PermissionOptionId, cx: &mut TestAppContext, -) { - let fs = init_test(cx).await; - let project = Project::test(fs, [path!("/private/tmp").as_ref()], cx).await; - let thread = new_test_thread(server, project.clone(), "/private/tmp", cx).await; +) where + T: AgentServer + 'static, + F: AsyncFn(&Arc, &Entity, &mut TestAppContext) -> T, +{ + let fs = init_test(cx).await as Arc; + let project = Project::test(fs.clone(), [path!("/private/tmp").as_ref()], cx).await; + let thread = new_test_thread( + server(&fs, &project, cx).await, + project.clone(), + "/private/tmp", + cx, + ) + .await; let full_turn = thread.update(cx, |thread, cx| { thread.send_raw( r#"Run exactly `touch hello.txt && echo "Hello, world!" | tee hello.txt` in the terminal."#, @@ -212,7 +252,9 @@ pub async fn test_tool_call_with_permission( assert!(thread.entries().iter().any(|entry| matches!( entry, AgentThreadEntry::ToolCall(ToolCall { - status: ToolCallStatus::Allowed { .. }, + status: ToolCallStatus::Pending + | ToolCallStatus::InProgress + | ToolCallStatus::Completed, .. }) ))); @@ -223,7 +265,9 @@ pub async fn test_tool_call_with_permission( thread.read_with(cx, |thread, cx| { let AgentThreadEntry::ToolCall(ToolCall { content, - status: ToolCallStatus::Allowed { .. }, + status: ToolCallStatus::Pending + | ToolCallStatus::InProgress + | ToolCallStatus::Completed, .. 
}) = thread .entries() @@ -241,11 +285,21 @@ pub async fn test_tool_call_with_permission( }); } -pub async fn test_cancel(server: impl AgentServer + 'static, cx: &mut TestAppContext) { - let fs = init_test(cx).await; - - let project = Project::test(fs, [path!("/private/tmp").as_ref()], cx).await; - let thread = new_test_thread(server, project.clone(), "/private/tmp", cx).await; +pub async fn test_cancel(server: F, cx: &mut TestAppContext) +where + T: AgentServer + 'static, + F: AsyncFn(&Arc, &Entity, &mut TestAppContext) -> T, +{ + let fs = init_test(cx).await as Arc; + + let project = Project::test(fs.clone(), [path!("/private/tmp").as_ref()], cx).await; + let thread = new_test_thread( + server(&fs, &project, cx).await, + project.clone(), + "/private/tmp", + cx, + ) + .await; let _ = thread.update(cx, |thread, cx| { thread.send_raw( r#"Run exactly `touch hello.txt && echo "Hello, world!" | tee hello.txt` in the terminal."#, @@ -310,10 +364,20 @@ pub async fn test_cancel(server: impl AgentServer + 'static, cx: &mut TestAppCon }); } -pub async fn test_thread_drop(server: impl AgentServer + 'static, cx: &mut TestAppContext) { - let fs = init_test(cx).await; - let project = Project::test(fs, [], cx).await; - let thread = new_test_thread(server, project.clone(), "/private/tmp", cx).await; +pub async fn test_thread_drop(server: F, cx: &mut TestAppContext) +where + T: AgentServer + 'static, + F: AsyncFn(&Arc, &Entity, &mut TestAppContext) -> T, +{ + let fs = init_test(cx).await as Arc; + let project = Project::test(fs.clone(), [], cx).await; + let thread = new_test_thread( + server(&fs, &project, cx).await, + project.clone(), + "/private/tmp", + cx, + ) + .await; thread .update(cx, |thread, cx| thread.send_raw("Hello from test!", cx)) @@ -380,25 +444,39 @@ macro_rules! 
common_e2e_tests { } }; } +pub use common_e2e_tests; // Helpers pub async fn init_test(cx: &mut TestAppContext) -> Arc { + #[cfg(test)] + use settings::Settings; + env_logger::try_init().ok(); cx.update(|cx| { - let settings_store = SettingsStore::test(cx); + let settings_store = settings::SettingsStore::test(cx); cx.set_global(settings_store); Project::init_settings(cx); language::init(cx); + gpui_tokio::init(cx); + let http_client = reqwest_client::ReqwestClient::user_agent("agent tests").unwrap(); + cx.set_http_client(Arc::new(http_client)); + client::init_settings(cx); + let client = client::Client::production(cx); + let user_store = cx.new(|cx| client::UserStore::new(client.clone(), cx)); + language_model::init(client.clone(), cx); + language_models::init(user_store, client, cx); + agent_settings::init(cx); crate::settings::init(cx); + #[cfg(test)] crate::AllAgentServersSettings::override_global( - AllAgentServersSettings { - claude: Some(AgentServerSettings { + crate::AllAgentServersSettings { + claude: Some(crate::AgentServerSettings { command: crate::claude::tests::local_command(), }), - gemini: Some(AgentServerSettings { + gemini: Some(crate::AgentServerSettings { command: crate::gemini::tests::local_command(), }), }, @@ -422,12 +500,9 @@ pub async fn new_test_thread( .await .unwrap(); - let thread = connection - .new_thread(project.clone(), current_dir.as_ref(), &mut cx.to_async()) + cx.update(|cx| connection.new_thread(project.clone(), current_dir.as_ref(), cx)) .await - .unwrap(); - - thread + .unwrap() } pub async fn run_until_first_tool_call( @@ -465,7 +540,7 @@ pub fn get_zed_path() -> PathBuf { while zed_path .file_name() - .map_or(true, |name| name.to_string_lossy() != "debug") + .is_none_or(|name| name.to_string_lossy() != "debug") { if !zed_path.pop() { panic!("Could not find target directory"); diff --git a/crates/agent_servers/src/gemini.rs b/crates/agent_servers/src/gemini.rs index ad883f6da8bd344044e1db0051ca6f24120d5057..3b892e793140462fdcc9b1f03242c96adcd6e059 100644 --- a/crates/agent_servers/src/gemini.rs +++ b/crates/agent_servers/src/gemini.rs @@ -1,10 +1,11 @@ -use std::path::Path; use std::rc::Rc; +use std::{any::Any, path::Path}; use crate::{AgentServer, AgentServerCommand}; use acp_thread::{AgentConnection, LoadError}; use anyhow::Result; use gpui::{Entity, Task}; +use language_models::provider::google::GoogleLanguageModelProvider; use project::Project; use settings::SettingsStore; use ui::App; @@ -18,15 +19,15 @@ const ACP_ARG: &str = "--experimental-acp"; impl AgentServer for Gemini { fn name(&self) -> &'static str { - "Gemini" + "Gemini CLI" } fn empty_state_headline(&self) -> &'static str { - "Welcome to Gemini" + "Welcome to Gemini CLI" } fn empty_state_message(&self) -> &'static str { - "Ask questions, edit files, run commands.\nBe specific for the best results." 
+ "Ask questions, edit files, run commands" } fn logo(&self) -> ui::IconName { @@ -47,12 +48,20 @@ impl AgentServer for Gemini { settings.get::(None).gemini.clone() })?; - let Some(command) = + let Some(mut command) = AgentServerCommand::resolve("gemini", &[ACP_ARG], None, settings, &project, cx).await else { - anyhow::bail!("Failed to find gemini binary"); + return Err(LoadError::NotInstalled { + error_message: "Failed to find Gemini CLI binary".into(), + install_message: "Install Gemini CLI".into(), + install_command: "npm install -g @google/gemini-cli@preview".into() + }.into()); }; + if let Some(api_key)= cx.update(GoogleLanguageModelProvider::api_key)?.await.ok() { + command.env.get_or_insert_default().insert("GEMINI_API_KEY".to_owned(), api_key.key); + } + let result = crate::acp::connect(server_name, command.clone(), &root_dir, cx).await; if result.is_err() { let version_fut = util::command::new_smol_command(&command.path) @@ -75,17 +84,22 @@ impl AgentServer for Gemini { if !supported { return Err(LoadError::Unsupported { error_message: format!( - "Your installed version of Gemini {} doesn't support the Agentic Coding Protocol (ACP).", + "Your installed version of Gemini CLI ({}, version {}) doesn't support the Agentic Coding Protocol (ACP).", + command.path.to_string_lossy(), current_version ).into(), - upgrade_message: "Upgrade Gemini to Latest".into(), - upgrade_command: "npm install -g @google/gemini-cli@latest".into(), + upgrade_message: "Upgrade Gemini CLI to latest".into(), + upgrade_command: "npm install -g @google/gemini-cli@preview".into(), }.into()) } } result }) } + + fn into_any(self: Rc) -> Rc { + self + } } #[cfg(test)] @@ -94,7 +108,7 @@ pub(crate) mod tests { use crate::AgentServerCommand; use std::path::Path; - crate::common_e2e_tests!(Gemini, allow_option_id = "proceed_once"); + crate::common_e2e_tests!(async |_, _, _| Gemini, allow_option_id = "proceed_once"); pub fn local_command() -> AgentServerCommand { let cli_path = Path::new(env!("CARGO_MANIFEST_DIR")) diff --git a/crates/agent_settings/src/agent_profile.rs b/crates/agent_settings/src/agent_profile.rs index a6b8633b34d1e969e8cc8952dbc932e54d38a49f..04fdd4a753a3fd015f2710fb9d70770ad960c560 100644 --- a/crates/agent_settings/src/agent_profile.rs +++ b/crates/agent_settings/src/agent_profile.rs @@ -48,6 +48,20 @@ pub struct AgentProfileSettings { pub context_servers: IndexMap, ContextServerPreset>, } +impl AgentProfileSettings { + pub fn is_tool_enabled(&self, tool_name: &str) -> bool { + self.tools.get(tool_name) == Some(&true) + } + + pub fn is_context_server_tool_enabled(&self, server_id: &str, tool_name: &str) -> bool { + self.enable_all_context_servers + || self + .context_servers + .get(server_id) + .is_some_and(|preset| preset.tools.get(tool_name) == Some(&true)) + } +} + #[derive(Debug, Clone, Default)] pub struct ContextServerPreset { pub tools: IndexMap, bool>, diff --git a/crates/agent_settings/src/agent_settings.rs b/crates/agent_settings/src/agent_settings.rs index e6a79963d670d2bc2067826855202104bcf1621c..ed1ed2b89879c18eceaab22843390a766e4f6c77 100644 --- a/crates/agent_settings/src/agent_settings.rs +++ b/crates/agent_settings/src/agent_settings.rs @@ -15,6 +15,8 @@ pub use crate::agent_profile::*; pub const SUMMARIZE_THREAD_PROMPT: &str = include_str!("../../agent/src/prompts/summarize_thread_prompt.txt"); +pub const SUMMARIZE_THREAD_DETAILED_PROMPT: &str = + include_str!("../../agent/src/prompts/summarize_thread_detailed_prompt.txt"); pub fn init(cx: &mut App) { AgentSettings::register(cx); 
@@ -116,15 +118,15 @@ pub struct LanguageModelParameters { impl LanguageModelParameters { pub fn matches(&self, model: &Arc) -> bool { - if let Some(provider) = &self.provider { - if provider.0 != model.provider_id().0 { - return false; - } + if let Some(provider) = &self.provider + && provider.0 != model.provider_id().0 + { + return false; } - if let Some(setting_model) = &self.model { - if *setting_model != model.id().0 { - return false; - } + if let Some(setting_model) = &self.model + && *setting_model != model.id().0 + { + return false; } true } @@ -309,7 +311,7 @@ pub struct AgentSettingsContent { /// /// Default: true expand_terminal_card: Option, - /// Whether to always use cmd-enter (or ctrl-enter on Linux) to send messages in the agent panel. + /// Whether to always use cmd-enter (or ctrl-enter on Linux or Windows) to send messages in the agent panel. /// /// Default: false use_modifier_to_send: Option, @@ -442,10 +444,6 @@ impl Settings for AgentSettings { &mut settings.inline_alternatives, value.inline_alternatives.clone(), ); - merge( - &mut settings.always_allow_tool_actions, - value.always_allow_tool_actions, - ); merge( &mut settings.notify_when_agent_waiting, value.notify_when_agent_waiting, @@ -507,6 +505,19 @@ impl Settings for AgentSettings { } } + debug_assert!( + !sources.default.always_allow_tool_actions.unwrap_or(false), + "For security, agent.always_allow_tool_actions should always be false in default.json. If it's true, that is a bug that should be fixed!" + ); + + // For security reasons, only trust the user's global settings for whether to always allow tool actions. + // If this could be overridden locally, an attacker could (e.g. by committing to source control and + // convincing you to switch branches) modify your project-local settings to disable the agent's safety checks. 
+ settings.always_allow_tool_actions = sources + .user + .and_then(|setting| setting.always_allow_tool_actions) + .unwrap_or(false); + Ok(settings) } diff --git a/crates/agent_ui/Cargo.toml b/crates/agent_ui/Cargo.toml index c145df0eaecd5c504830e1985a3d38911b000a5e..43e3b251245af5d014c952afe2fec1f30abafe53 100644 --- a/crates/agent_ui/Cargo.toml +++ b/crates/agent_ui/Cargo.toml @@ -17,6 +17,7 @@ test-support = ["gpui/test-support", "language/test-support"] [dependencies] acp_thread.workspace = true +action_log.workspace = true agent-client-protocol.workspace = true agent.workspace = true agent2.workspace = true @@ -49,7 +50,6 @@ fuzzy.workspace = true gpui.workspace = true html_to_markdown.workspace = true http_client.workspace = true -indexed_docs.workspace = true indoc.workspace = true inventory.workspace = true itertools.workspace = true @@ -92,6 +92,7 @@ time.workspace = true time_format.workspace = true ui.workspace = true ui_input.workspace = true +url.workspace = true urlencoding.workspace = true util.workspace = true uuid.workspace = true @@ -101,8 +102,13 @@ workspace.workspace = true zed_actions.workspace = true [dev-dependencies] +acp_thread = { workspace = true, features = ["test-support"] } +agent = { workspace = true, features = ["test-support"] } +agent2 = { workspace = true, features = ["test-support"] } +assistant_context = { workspace = true, features = ["test-support"] } assistant_tools.workspace = true buffer_diff = { workspace = true, features = ["test-support"] } +db = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } gpui = { workspace = true, "features" = ["test-support"] } indoc.workspace = true diff --git a/crates/agent_ui/src/acp.rs b/crates/agent_ui/src/acp.rs index cc476b1a862b11d964f731cbb0d5ac8e9f100e59..6f228b91d6ed74c7cb330aecd1c9de07e386bed1 100644 --- a/crates/agent_ui/src/acp.rs +++ b/crates/agent_ui/src/acp.rs @@ -1,6 +1,12 @@ mod completion_provider; -mod message_history; +mod entry_view_state; +mod message_editor; +mod model_selector; +mod model_selector_popover; +mod thread_history; mod thread_view; -pub use message_history::MessageHistory; +pub use model_selector::AcpModelSelector; +pub use model_selector_popover::AcpModelSelectorPopover; +pub use thread_history::*; pub use thread_view::AcpThreadView; diff --git a/crates/agent_ui/src/acp/completion_provider.rs b/crates/agent_ui/src/acp/completion_provider.rs index fca4ae0300bbe82e6c64703de2b2f3b6b101cf20..3587e5144eab22ec1ab6cf60c4d6eb59c8c5d409 100644 --- a/crates/agent_ui/src/acp/completion_provider.rs +++ b/crates/agent_ui/src/acp/completion_provider.rs @@ -1,101 +1,222 @@ +use std::cell::Cell; use std::ops::Range; -use std::path::Path; +use std::rc::Rc; use std::sync::Arc; use std::sync::atomic::AtomicBool; +use acp_thread::MentionUri; +use agent_client_protocol as acp; +use agent2::{HistoryEntry, HistoryStore}; use anyhow::Result; -use collections::HashMap; -use editor::display_map::CreaseId; use editor::{CompletionProvider, Editor, ExcerptId}; -use file_icons::FileIcons; +use fuzzy::{StringMatch, StringMatchCandidate}; use gpui::{App, Entity, Task, WeakEntity}; use language::{Buffer, CodeLabel, HighlightId}; use lsp::CompletionContext; -use parking_lot::Mutex; -use project::{Completion, CompletionIntent, CompletionResponse, ProjectPath, WorktreeId}; +use project::{ + Completion, CompletionIntent, CompletionResponse, Project, ProjectPath, Symbol, WorktreeId, +}; +use prompt_store::PromptStore; use rope::Point; -use text::{Anchor, 
ToPoint}; +use text::{Anchor, ToPoint as _}; use ui::prelude::*; use workspace::Workspace; -use crate::context_picker::MentionLink; -use crate::context_picker::file_context_picker::{extract_file_name_and_directory, search_files}; - -#[derive(Default)] -pub struct MentionSet { - paths_by_crease_id: HashMap, +use crate::AgentPanel; +use crate::acp::message_editor::MessageEditor; +use crate::context_picker::file_context_picker::{FileMatch, search_files}; +use crate::context_picker::rules_context_picker::{RulesContextEntry, search_rules}; +use crate::context_picker::symbol_context_picker::SymbolMatch; +use crate::context_picker::symbol_context_picker::search_symbols; +use crate::context_picker::{ + ContextPickerAction, ContextPickerEntry, ContextPickerMode, selection_ranges, +}; + +pub(crate) enum Match { + File(FileMatch), + Symbol(SymbolMatch), + Thread(HistoryEntry), + RecentThread(HistoryEntry), + Fetch(SharedString), + Rules(RulesContextEntry), + Entry(EntryMatch), } -impl MentionSet { - pub fn insert(&mut self, crease_id: CreaseId, path: ProjectPath) { - self.paths_by_crease_id.insert(crease_id, path); - } - - pub fn path_for_crease_id(&self, crease_id: CreaseId) -> Option { - self.paths_by_crease_id.get(&crease_id).cloned() - } +pub struct EntryMatch { + mat: Option, + entry: ContextPickerEntry, +} - pub fn drain(&mut self) -> impl Iterator { - self.paths_by_crease_id.drain().map(|(id, _)| id) +impl Match { + pub fn score(&self) -> f64 { + match self { + Match::File(file) => file.mat.score, + Match::Entry(mode) => mode.mat.as_ref().map(|mat| mat.score).unwrap_or(1.), + Match::Thread(_) => 1., + Match::RecentThread(_) => 1., + Match::Symbol(_) => 1., + Match::Rules(_) => 1., + Match::Fetch(_) => 1., + } } } pub struct ContextPickerCompletionProvider { + message_editor: WeakEntity, workspace: WeakEntity, - editor: WeakEntity, - mention_set: Arc>, + history_store: Entity, + prompt_store: Option>, + prompt_capabilities: Rc>, } impl ContextPickerCompletionProvider { pub fn new( - mention_set: Arc>, + message_editor: WeakEntity, workspace: WeakEntity, - editor: WeakEntity, + history_store: Entity, + prompt_store: Option>, + prompt_capabilities: Rc>, ) -> Self { Self { - mention_set, + message_editor, workspace, - editor, + history_store, + prompt_store, + prompt_capabilities, + } + } + + fn completion_for_entry( + entry: ContextPickerEntry, + source_range: Range, + message_editor: WeakEntity, + workspace: &Entity, + cx: &mut App, + ) -> Option { + match entry { + ContextPickerEntry::Mode(mode) => Some(Completion { + replace_range: source_range, + new_text: format!("@{} ", mode.keyword()), + label: CodeLabel::plain(mode.label().to_string(), None), + icon_path: Some(mode.icon().path().into()), + documentation: None, + source: project::CompletionSource::Custom, + insert_text_mode: None, + // This ensures that when a user accepts this completion, the + // completion menu will still be shown after "@category " is + // inserted + confirm: Some(Arc::new(|_, _, _| true)), + }), + ContextPickerEntry::Action(action) => { + Self::completion_for_action(action, source_range, message_editor, workspace, cx) + } + } + } + + fn completion_for_thread( + thread_entry: HistoryEntry, + source_range: Range, + recent: bool, + editor: WeakEntity, + cx: &mut App, + ) -> Completion { + let uri = thread_entry.mention_uri(); + + let icon_for_completion = if recent { + IconName::HistoryRerun.path().into() + } else { + uri.icon_path(cx) + }; + + let new_text = format!("{} ", uri.as_link()); + + let new_text_len = 
new_text.len(); + Completion { + replace_range: source_range.clone(), + new_text, + label: CodeLabel::plain(thread_entry.title().to_string(), None), + documentation: None, + insert_text_mode: None, + source: project::CompletionSource::Custom, + icon_path: Some(icon_for_completion), + confirm: Some(confirm_completion_callback( + thread_entry.title().clone(), + source_range.start, + new_text_len - 1, + editor, + uri, + )), } } - fn completion_for_path( + fn completion_for_rules( + rule: RulesContextEntry, + source_range: Range, + editor: WeakEntity, + cx: &mut App, + ) -> Completion { + let uri = MentionUri::Rule { + id: rule.prompt_id.into(), + name: rule.title.to_string(), + }; + let new_text = format!("{} ", uri.as_link()); + let new_text_len = new_text.len(); + let icon_path = uri.icon_path(cx); + Completion { + replace_range: source_range.clone(), + new_text, + label: CodeLabel::plain(rule.title.to_string(), None), + documentation: None, + insert_text_mode: None, + source: project::CompletionSource::Custom, + icon_path: Some(icon_path), + confirm: Some(confirm_completion_callback( + rule.title, + source_range.start, + new_text_len - 1, + editor, + uri, + )), + } + } + + pub(crate) fn completion_for_path( project_path: ProjectPath, path_prefix: &str, is_recent: bool, is_directory: bool, - excerpt_id: ExcerptId, source_range: Range, - editor: Entity, - mention_set: Arc>, - cx: &App, - ) -> Completion { + message_editor: WeakEntity, + project: Entity, + cx: &mut App, + ) -> Option { let (file_name, directory) = - extract_file_name_and_directory(&project_path.path, path_prefix); + crate::context_picker::file_context_picker::extract_file_name_and_directory( + &project_path.path, + path_prefix, + ); let label = build_code_label_for_full_path(&file_name, directory.as_ref().map(|s| s.as_ref()), cx); - let full_path = if let Some(directory) = directory { - format!("{}{}", directory, file_name) - } else { - file_name.to_string() - }; - let crease_icon_path = if is_directory { - FileIcons::get_folder_icon(false, cx).unwrap_or_else(|| IconName::Folder.path().into()) + let abs_path = project.read(cx).absolute_path(&project_path, cx)?; + + let uri = if is_directory { + MentionUri::Directory { abs_path } } else { - FileIcons::get_icon(Path::new(&full_path), cx) - .unwrap_or_else(|| IconName::File.path().into()) + MentionUri::File { abs_path } }; + + let crease_icon_path = uri.icon_path(cx); let completion_icon_path = if is_recent { IconName::HistoryRerun.path().into() } else { - crease_icon_path.clone() + crease_icon_path }; - let new_text = format!("{} ", MentionLink::for_file(&file_name, &full_path)); + let new_text = format!("{} ", uri.as_link()); let new_text_len = new_text.len(); - Completion { + Some(Completion { replace_range: source_range.clone(), new_text, label, @@ -104,28 +225,409 @@ impl ContextPickerCompletionProvider { icon_path: Some(completion_icon_path), insert_text_mode: None, confirm: Some(confirm_completion_callback( - crease_icon_path, file_name, - project_path, - excerpt_id, source_range.start, new_text_len - 1, - editor, - mention_set, + message_editor, + uri, )), + }) + } + + fn completion_for_symbol( + symbol: Symbol, + source_range: Range, + message_editor: WeakEntity, + workspace: Entity, + cx: &mut App, + ) -> Option { + let project = workspace.read(cx).project().clone(); + + let label = CodeLabel::plain(symbol.name.clone(), None); + + let abs_path = project.read(cx).absolute_path(&symbol.path, cx)?; + let uri = MentionUri::Symbol { + path: abs_path, + name: 
symbol.name.clone(), + line_range: symbol.range.start.0.row..symbol.range.end.0.row, + }; + let new_text = format!("{} ", uri.as_link()); + let new_text_len = new_text.len(); + let icon_path = uri.icon_path(cx); + Some(Completion { + replace_range: source_range.clone(), + new_text, + label, + documentation: None, + source: project::CompletionSource::Custom, + icon_path: Some(icon_path), + insert_text_mode: None, + confirm: Some(confirm_completion_callback( + symbol.name.into(), + source_range.start, + new_text_len - 1, + message_editor, + uri, + )), + }) + } + + fn completion_for_fetch( + source_range: Range, + url_to_fetch: SharedString, + message_editor: WeakEntity, + cx: &mut App, + ) -> Option { + let new_text = format!("@fetch {} ", url_to_fetch); + let url_to_fetch = url::Url::parse(url_to_fetch.as_ref()) + .or_else(|_| url::Url::parse(&format!("https://{url_to_fetch}"))) + .ok()?; + let mention_uri = MentionUri::Fetch { + url: url_to_fetch.clone(), + }; + let icon_path = mention_uri.icon_path(cx); + Some(Completion { + replace_range: source_range.clone(), + new_text: new_text.clone(), + label: CodeLabel::plain(url_to_fetch.to_string(), None), + documentation: None, + source: project::CompletionSource::Custom, + icon_path: Some(icon_path), + insert_text_mode: None, + confirm: Some(confirm_completion_callback( + url_to_fetch.to_string().into(), + source_range.start, + new_text.len() - 1, + message_editor, + mention_uri, + )), + }) + } + + pub(crate) fn completion_for_action( + action: ContextPickerAction, + source_range: Range, + message_editor: WeakEntity, + workspace: &Entity, + cx: &mut App, + ) -> Option { + let (new_text, on_action) = match action { + ContextPickerAction::AddSelections => { + const PLACEHOLDER: &str = "selection "; + let selections = selection_ranges(workspace, cx) + .into_iter() + .enumerate() + .map(|(ix, (buffer, range))| { + ( + buffer, + range, + (PLACEHOLDER.len() * ix)..(PLACEHOLDER.len() * (ix + 1) - 1), + ) + }) + .collect::>(); + + let new_text: String = PLACEHOLDER.repeat(selections.len()); + + let callback = Arc::new({ + let source_range = source_range.clone(); + move |_, window: &mut Window, cx: &mut App| { + let selections = selections.clone(); + let message_editor = message_editor.clone(); + let source_range = source_range.clone(); + window.defer(cx, move |window, cx| { + message_editor + .update(cx, |message_editor, cx| { + message_editor.confirm_mention_for_selection( + source_range, + selections, + window, + cx, + ) + }) + .ok(); + }); + false + } + }); + + (new_text, callback) + } + }; + + Some(Completion { + replace_range: source_range, + new_text, + label: CodeLabel::plain(action.label().to_string(), None), + icon_path: Some(action.icon().path().into()), + documentation: None, + source: project::CompletionSource::Custom, + insert_text_mode: None, + // This ensures that when a user accepts this completion, the + // completion menu will still be shown after "@category " is + // inserted + confirm: Some(on_action), + }) + } + + fn search( + &self, + mode: Option, + query: String, + cancellation_flag: Arc, + cx: &mut App, + ) -> Task> { + let Some(workspace) = self.workspace.upgrade() else { + return Task::ready(Vec::default()); + }; + match mode { + Some(ContextPickerMode::File) => { + let search_files_task = search_files(query, cancellation_flag, &workspace, cx); + cx.background_spawn(async move { + search_files_task + .await + .into_iter() + .map(Match::File) + .collect() + }) + } + + Some(ContextPickerMode::Symbol) => { + let 
search_symbols_task = search_symbols(query, cancellation_flag, &workspace, cx); + cx.background_spawn(async move { + search_symbols_task + .await + .into_iter() + .map(Match::Symbol) + .collect() + }) + } + + Some(ContextPickerMode::Thread) => { + let search_threads_task = + search_threads(query, cancellation_flag, &self.history_store, cx); + cx.background_spawn(async move { + search_threads_task + .await + .into_iter() + .map(Match::Thread) + .collect() + }) + } + + Some(ContextPickerMode::Fetch) => { + if !query.is_empty() { + Task::ready(vec![Match::Fetch(query.into())]) + } else { + Task::ready(Vec::new()) + } + } + + Some(ContextPickerMode::Rules) => { + if let Some(prompt_store) = self.prompt_store.as_ref() { + let search_rules_task = + search_rules(query, cancellation_flag, prompt_store, cx); + cx.background_spawn(async move { + search_rules_task + .await + .into_iter() + .map(Match::Rules) + .collect::>() + }) + } else { + Task::ready(Vec::new()) + } + } + + None if query.is_empty() => { + let mut matches = self.recent_context_picker_entries(&workspace, cx); + + matches.extend( + self.available_context_picker_entries(&workspace, cx) + .into_iter() + .map(|mode| { + Match::Entry(EntryMatch { + entry: mode, + mat: None, + }) + }), + ); + + Task::ready(matches) + } + None => { + let executor = cx.background_executor().clone(); + + let search_files_task = + search_files(query.clone(), cancellation_flag, &workspace, cx); + + let entries = self.available_context_picker_entries(&workspace, cx); + let entry_candidates = entries + .iter() + .enumerate() + .map(|(ix, entry)| StringMatchCandidate::new(ix, entry.keyword())) + .collect::>(); + + cx.background_spawn(async move { + let mut matches = search_files_task + .await + .into_iter() + .map(Match::File) + .collect::>(); + + let entry_matches = fuzzy::match_strings( + &entry_candidates, + &query, + false, + true, + 100, + &Arc::new(AtomicBool::default()), + executor, + ) + .await; + + matches.extend(entry_matches.into_iter().map(|mat| { + Match::Entry(EntryMatch { + entry: entries[mat.candidate_id], + mat: Some(mat), + }) + })); + + matches.sort_by(|a, b| { + b.score() + .partial_cmp(&a.score()) + .unwrap_or(std::cmp::Ordering::Equal) + }); + + matches + }) + } } } + + fn recent_context_picker_entries( + &self, + workspace: &Entity, + cx: &mut App, + ) -> Vec { + let mut recent = Vec::with_capacity(6); + + let mut mentions = self + .message_editor + .read_with(cx, |message_editor, _cx| message_editor.mentions()) + .unwrap_or_default(); + let workspace = workspace.read(cx); + let project = workspace.project().read(cx); + + if let Some(agent_panel) = workspace.panel::(cx) + && let Some(thread) = agent_panel.read(cx).active_agent_thread(cx) + { + let thread = thread.read(cx); + mentions.insert(MentionUri::Thread { + id: thread.session_id().clone(), + name: thread.title().into(), + }); + } + + recent.extend( + workspace + .recent_navigation_history_iter(cx) + .filter(|(_, abs_path)| { + abs_path.as_ref().is_none_or(|path| { + !mentions.contains(&MentionUri::File { + abs_path: path.clone(), + }) + }) + }) + .take(4) + .filter_map(|(project_path, _)| { + project + .worktree_for_id(project_path.worktree_id, cx) + .map(|worktree| { + let path_prefix = worktree.read(cx).root_name().into(); + Match::File(FileMatch { + mat: fuzzy::PathMatch { + score: 1., + positions: Vec::new(), + worktree_id: project_path.worktree_id.to_usize(), + path: project_path.path, + path_prefix, + is_dir: false, + distance_to_relative_ancestor: 0, + }, + is_recent: true, + 
}) + }) + }), + ); + + if self.prompt_capabilities.get().embedded_context { + const RECENT_COUNT: usize = 2; + let threads = self + .history_store + .read(cx) + .recently_opened_entries(cx) + .into_iter() + .filter(|thread| !mentions.contains(&thread.mention_uri())) + .take(RECENT_COUNT) + .collect::>(); + + recent.extend(threads.into_iter().map(Match::RecentThread)); + } + + recent + } + + fn available_context_picker_entries( + &self, + workspace: &Entity, + cx: &mut App, + ) -> Vec { + let embedded_context = self.prompt_capabilities.get().embedded_context; + let mut entries = if embedded_context { + vec![ + ContextPickerEntry::Mode(ContextPickerMode::File), + ContextPickerEntry::Mode(ContextPickerMode::Symbol), + ContextPickerEntry::Mode(ContextPickerMode::Thread), + ] + } else { + // File is always available, but we don't need a mode entry + vec![] + }; + + let has_selection = workspace + .read(cx) + .active_item(cx) + .and_then(|item| item.downcast::()) + .is_some_and(|editor| { + editor.update(cx, |editor, cx| editor.has_non_empty_selection(cx)) + }); + if has_selection { + entries.push(ContextPickerEntry::Action( + ContextPickerAction::AddSelections, + )); + } + + if embedded_context { + if self.prompt_store.is_some() { + entries.push(ContextPickerEntry::Mode(ContextPickerMode::Rules)); + } + + entries.push(ContextPickerEntry::Mode(ContextPickerMode::Fetch)); + } + + entries + } } fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx: &App) -> CodeLabel { let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId); let mut label = CodeLabel::default(); - label.push_str(&file_name, None); + label.push_str(file_name, None); label.push_str(" ", None); if let Some(directory) = directory { - label.push_str(&directory, comment_id); + label.push_str(directory, comment_id); } label.filter_range = 0..label.text().len(); @@ -136,7 +638,7 @@ fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx: impl CompletionProvider for ContextPickerCompletionProvider { fn completions( &self, - excerpt_id: ExcerptId, + _excerpt_id: ExcerptId, buffer: &Entity, buffer_position: Anchor, _trigger: CompletionContext, @@ -149,7 +651,11 @@ impl CompletionProvider for ContextPickerCompletionProvider { let offset_to_line = buffer.point_to_offset(line_start); let mut lines = buffer.text_for_range(line_start..position).lines(); let line = lines.next()?; - MentionCompletion::try_parse(line, offset_to_line) + MentionCompletion::try_parse( + self.prompt_capabilities.get().embedded_context, + line, + offset_to_line, + ) }); let Some(state) = state else { return Task::ready(Ok(Vec::new())); @@ -159,44 +665,88 @@ impl CompletionProvider for ContextPickerCompletionProvider { return Task::ready(Ok(Vec::new())); }; + let project = workspace.read(cx).project().clone(); let snapshot = buffer.read(cx).snapshot(); let source_range = snapshot.anchor_before(state.source_range.start) ..snapshot.anchor_after(state.source_range.end); - let editor = self.editor.clone(); - let mention_set = self.mention_set.clone(); - let MentionCompletion { argument, .. } = state; + let editor = self.message_editor.clone(); + + let MentionCompletion { mode, argument, .. 
} = state; let query = argument.unwrap_or_else(|| "".to_string()); - let search_task = search_files(query.clone(), Arc::::default(), &workspace, cx); + let search_task = self.search(mode, query, Arc::::default(), cx); cx.spawn(async move |_, cx| { let matches = search_task.await; - let Some(editor) = editor.upgrade() else { - return Ok(Vec::new()); - }; let completions = cx.update(|cx| { matches .into_iter() - .map(|mat| { - let path_match = &mat.mat; - let project_path = ProjectPath { - worktree_id: WorktreeId::from_usize(path_match.worktree_id), - path: path_match.path.clone(), - }; - - Self::completion_for_path( - project_path, - &path_match.path_prefix, - mat.is_recent, - path_match.is_dir, - excerpt_id, + .filter_map(|mat| match mat { + Match::File(FileMatch { mat, is_recent }) => { + let project_path = ProjectPath { + worktree_id: WorktreeId::from_usize(mat.worktree_id), + path: mat.path.clone(), + }; + + Self::completion_for_path( + project_path, + &mat.path_prefix, + is_recent, + mat.is_dir, + source_range.clone(), + editor.clone(), + project.clone(), + cx, + ) + } + + Match::Symbol(SymbolMatch { symbol, .. }) => Self::completion_for_symbol( + symbol, source_range.clone(), editor.clone(), - mention_set.clone(), + workspace.clone(), cx, - ) + ), + + Match::Thread(thread) => Some(Self::completion_for_thread( + thread, + source_range.clone(), + false, + editor.clone(), + cx, + )), + + Match::RecentThread(thread) => Some(Self::completion_for_thread( + thread, + source_range.clone(), + true, + editor.clone(), + cx, + )), + + Match::Rules(user_rules) => Some(Self::completion_for_rules( + user_rules, + source_range.clone(), + editor.clone(), + cx, + )), + + Match::Fetch(url) => Self::completion_for_fetch( + source_range.clone(), + url, + editor.clone(), + cx, + ), + + Match::Entry(EntryMatch { entry, .. 
}) => Self::completion_for_entry( + entry, + source_range.clone(), + editor.clone(), + &workspace, + cx, + ), }) .collect() })?; @@ -225,12 +775,16 @@ impl CompletionProvider for ContextPickerCompletionProvider { let offset_to_line = buffer.point_to_offset(line_start); let mut lines = buffer.text_for_range(line_start..position).lines(); if let Some(line) = lines.next() { - MentionCompletion::try_parse(line, offset_to_line) - .map(|completion| { - completion.source_range.start <= offset_to_line + position.column as usize - && completion.source_range.end >= offset_to_line + position.column as usize - }) - .unwrap_or(false) + MentionCompletion::try_parse( + self.prompt_capabilities.get().embedded_context, + line, + offset_to_line, + ) + .map(|completion| { + completion.source_range.start <= offset_to_line + position.column as usize + && completion.source_range.end >= offset_to_line + position.column as usize + }) + .unwrap_or(false) } else { false } @@ -245,36 +799,69 @@ impl CompletionProvider for ContextPickerCompletionProvider { } } +pub(crate) fn search_threads( + query: String, + cancellation_flag: Arc, + history_store: &Entity, + cx: &mut App, +) -> Task> { + let threads = history_store.read(cx).entries(cx); + if query.is_empty() { + return Task::ready(threads); + } + + let executor = cx.background_executor().clone(); + cx.background_spawn(async move { + let candidates = threads + .iter() + .enumerate() + .map(|(id, thread)| StringMatchCandidate::new(id, thread.title())) + .collect::>(); + let matches = fuzzy::match_strings( + &candidates, + &query, + false, + true, + 100, + &cancellation_flag, + executor, + ) + .await; + + matches + .into_iter() + .map(|mat| threads[mat.candidate_id].clone()) + .collect() + }) +} + fn confirm_completion_callback( - crease_icon_path: SharedString, crease_text: SharedString, - project_path: ProjectPath, - excerpt_id: ExcerptId, start: Anchor, content_len: usize, - editor: Entity, - mention_set: Arc>, + message_editor: WeakEntity, + mention_uri: MentionUri, ) -> Arc bool + Send + Sync> { Arc::new(move |_, window, cx| { + let message_editor = message_editor.clone(); let crease_text = crease_text.clone(); - let crease_icon_path = crease_icon_path.clone(); - let editor = editor.clone(); - let project_path = project_path.clone(); - let mention_set = mention_set.clone(); + let mention_uri = mention_uri.clone(); window.defer(cx, move |window, cx| { - let crease_id = crate::context_picker::insert_crease_for_mention( - excerpt_id, - start, - content_len, - crease_text.clone(), - crease_icon_path, - editor.clone(), - window, - cx, - ); - if let Some(crease_id) = crease_id { - mention_set.lock().insert(crease_id, project_path); - } + message_editor + .clone() + .update(cx, |message_editor, cx| { + message_editor + .confirm_completion( + crease_text, + start, + content_len, + mention_uri, + window, + cx, + ) + .detach(); + }) + .ok(); }); false }) @@ -283,11 +870,12 @@ fn confirm_completion_callback( #[derive(Debug, Default, PartialEq)] struct MentionCompletion { source_range: Range, + mode: Option, argument: Option, } impl MentionCompletion { - fn try_parse(line: &str, offset_to_line: usize) -> Option { + fn try_parse(allow_non_file_mentions: bool, line: &str, offset_to_line: usize) -> Option { let last_mention_start = line.rfind('@')?; if last_mention_start >= line.len() { return Some(Self::default()); @@ -296,23 +884,45 @@ impl MentionCompletion { && line .chars() .nth(last_mention_start - 1) - .map_or(false, |c| !c.is_whitespace()) + .is_some_and(|c| 
!c.is_whitespace()) { return None; } let rest_of_line = &line[last_mention_start + 1..]; + + let mut mode = None; let mut argument = None; let mut parts = rest_of_line.split_whitespace(); let mut end = last_mention_start + 1; - if let Some(argument_text) = parts.next() { - end += argument_text.len(); - argument = Some(argument_text.to_string()); + if let Some(mode_text) = parts.next() { + end += mode_text.len(); + + if let Some(parsed_mode) = ContextPickerMode::try_from(mode_text).ok() + && (allow_non_file_mentions || matches!(parsed_mode, ContextPickerMode::File)) + { + mode = Some(parsed_mode); + } else { + argument = Some(mode_text.to_string()); + } + match rest_of_line[mode_text.len()..].find(|c: char| !c.is_whitespace()) { + Some(whitespace_count) => { + if let Some(argument_text) = parts.next() { + argument = Some(argument_text.to_string()); + end += whitespace_count + argument_text.len(); + } + } + None => { + // Rest of line is entirely whitespace + end += rest_of_line.len() - mode_text.len(); + } + } } Some(Self { source_range: last_mention_start + offset_to_line..end + offset_to_line, + mode, argument, }) } @@ -321,254 +931,96 @@ impl MentionCompletion { #[cfg(test)] mod tests { use super::*; - use gpui::{EventEmitter, FocusHandle, Focusable, TestAppContext, VisualTestContext}; - use project::{Project, ProjectPath}; - use serde_json::json; - use settings::SettingsStore; - use std::{ops::Deref, rc::Rc}; - use util::path; - use workspace::{AppState, Item}; #[test] fn test_mention_completion_parse() { - assert_eq!(MentionCompletion::try_parse("Lorem Ipsum", 0), None); + assert_eq!(MentionCompletion::try_parse(true, "Lorem Ipsum", 0), None); assert_eq!( - MentionCompletion::try_parse("Lorem @", 0), + MentionCompletion::try_parse(true, "Lorem @", 0), Some(MentionCompletion { source_range: 6..7, + mode: None, argument: None, }) ); assert_eq!( - MentionCompletion::try_parse("Lorem @main", 0), + MentionCompletion::try_parse(true, "Lorem @file", 0), Some(MentionCompletion { source_range: 6..11, - argument: Some("main".to_string()), + mode: Some(ContextPickerMode::File), + argument: None, }) ); - assert_eq!(MentionCompletion::try_parse("test@", 0), None); - } - - struct AtMentionEditor(Entity); - - impl Item for AtMentionEditor { - type Event = (); - - fn include_in_nav_history() -> bool { - false - } - - fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString { - "Test".into() - } - } - - impl EventEmitter<()> for AtMentionEditor {} - - impl Focusable for AtMentionEditor { - fn focus_handle(&self, cx: &App) -> FocusHandle { - self.0.read(cx).focus_handle(cx).clone() - } - } - - impl Render for AtMentionEditor { - fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { - self.0.clone().into_any_element() - } - } - - #[gpui::test] - async fn test_context_completion_provider(cx: &mut TestAppContext) { - init_test(cx); - - let app_state = cx.update(AppState::test); - - cx.update(|cx| { - language::init(cx); - editor::init(cx); - workspace::init(app_state.clone(), cx); - Project::init_settings(cx); - }); - - app_state - .fs - .as_fake() - .insert_tree( - path!("/dir"), - json!({ - "editor": "", - "a": { - "one.txt": "", - "two.txt": "", - "three.txt": "", - "four.txt": "" - }, - "b": { - "five.txt": "", - "six.txt": "", - "seven.txt": "", - "eight.txt": "", - } - }), - ) - .await; - - let project = Project::test(app_state.fs.clone(), [path!("/dir").as_ref()], cx).await; - let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), 
window, cx)); - let workspace = window.root(cx).unwrap(); - - let worktree = project.update(cx, |project, cx| { - let mut worktrees = project.worktrees(cx).collect::>(); - assert_eq!(worktrees.len(), 1); - worktrees.pop().unwrap() - }); - let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id()); - - let mut cx = VisualTestContext::from_window(*window.deref(), cx); - - let paths = vec![ - path!("a/one.txt"), - path!("a/two.txt"), - path!("a/three.txt"), - path!("a/four.txt"), - path!("b/five.txt"), - path!("b/six.txt"), - path!("b/seven.txt"), - path!("b/eight.txt"), - ]; - - let mut opened_editors = Vec::new(); - for path in paths { - let buffer = workspace - .update_in(&mut cx, |workspace, window, cx| { - workspace.open_path( - ProjectPath { - worktree_id, - path: Path::new(path).into(), - }, - None, - false, - window, - cx, - ) - }) - .await - .unwrap(); - opened_editors.push(buffer); - } - - let editor = workspace.update_in(&mut cx, |workspace, window, cx| { - let editor = cx.new(|cx| { - Editor::new( - editor::EditorMode::full(), - multi_buffer::MultiBuffer::build_simple("", cx), - None, - window, - cx, - ) - }); - workspace.active_pane().update(cx, |pane, cx| { - pane.add_item( - Box::new(cx.new(|_| AtMentionEditor(editor.clone()))), - true, - true, - None, - window, - cx, - ); - }); - editor - }); - - let mention_set = Arc::new(Mutex::new(MentionSet::default())); + assert_eq!( + MentionCompletion::try_parse(true, "Lorem @file ", 0), + Some(MentionCompletion { + source_range: 6..12, + mode: Some(ContextPickerMode::File), + argument: None, + }) + ); - let editor_entity = editor.downgrade(); - editor.update_in(&mut cx, |editor, window, cx| { - window.focus(&editor.focus_handle(cx)); - editor.set_completion_provider(Some(Rc::new(ContextPickerCompletionProvider::new( - mention_set.clone(), - workspace.downgrade(), - editor_entity, - )))); - }); + assert_eq!( + MentionCompletion::try_parse(true, "Lorem @file main.rs", 0), + Some(MentionCompletion { + source_range: 6..19, + mode: Some(ContextPickerMode::File), + argument: Some("main.rs".to_string()), + }) + ); - cx.simulate_input("Lorem "); + assert_eq!( + MentionCompletion::try_parse(true, "Lorem @file main.rs ", 0), + Some(MentionCompletion { + source_range: 6..19, + mode: Some(ContextPickerMode::File), + argument: Some("main.rs".to_string()), + }) + ); - editor.update(&mut cx, |editor, cx| { - assert_eq!(editor.text(cx), "Lorem "); - assert!(!editor.has_visible_completions_menu()); - }); + assert_eq!( + MentionCompletion::try_parse(true, "Lorem @file main.rs Ipsum", 0), + Some(MentionCompletion { + source_range: 6..19, + mode: Some(ContextPickerMode::File), + argument: Some("main.rs".to_string()), + }) + ); - cx.simulate_input("@"); - - editor.update(&mut cx, |editor, cx| { - assert_eq!(editor.text(cx), "Lorem @"); - assert!(editor.has_visible_completions_menu()); - assert_eq!( - current_completion_labels(editor), - &[ - "eight.txt dir/b/", - "seven.txt dir/b/", - "six.txt dir/b/", - "five.txt dir/b/", - "four.txt dir/a/", - "three.txt dir/a/", - "two.txt dir/a/", - "one.txt dir/a/", - "dir ", - "a dir/", - "four.txt dir/a/", - "one.txt dir/a/", - "three.txt dir/a/", - "two.txt dir/a/", - "b dir/", - "eight.txt dir/b/", - "five.txt dir/b/", - "seven.txt dir/b/", - "six.txt dir/b/", - "editor dir/" - ] - ); - }); + assert_eq!( + MentionCompletion::try_parse(true, "Lorem @main", 0), + Some(MentionCompletion { + source_range: 6..11, + mode: None, + argument: Some("main".to_string()), + }) + ); - // Select and confirm "File" - 
editor.update_in(&mut cx, |editor, window, cx| { - assert!(editor.has_visible_completions_menu()); - editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx); - editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx); - editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx); - editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx); - editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx); - }); + assert_eq!(MentionCompletion::try_parse(true, "test@", 0), None); - cx.run_until_parked(); + // Allowed non-file mentions - editor.update(&mut cx, |editor, cx| { - assert_eq!(editor.text(cx), "Lorem [@four.txt](@file:dir/a/four.txt) "); - }); - } + assert_eq!( + MentionCompletion::try_parse(true, "Lorem @symbol main", 0), + Some(MentionCompletion { + source_range: 6..18, + mode: Some(ContextPickerMode::Symbol), + argument: Some("main".to_string()), + }) + ); - fn current_completion_labels(editor: &Editor) -> Vec { - let completions = editor.current_completions().expect("Missing completions"); - completions - .into_iter() - .map(|completion| completion.label.text.to_string()) - .collect::>() - } + // Disallowed non-file mentions - pub(crate) fn init_test(cx: &mut TestAppContext) { - cx.update(|cx| { - let store = SettingsStore::test(cx); - cx.set_global(store); - theme::init(theme::LoadThemes::JustBase, cx); - client::init_settings(cx); - language::init(cx); - Project::init_settings(cx); - workspace::init_settings(cx); - editor::init_settings(cx); - }); + assert_eq!( + MentionCompletion::try_parse(false, "Lorem @symbol main", 0), + Some(MentionCompletion { + source_range: 6..18, + mode: None, + argument: Some("main".to_string()), + }) + ); } } diff --git a/crates/agent_ui/src/acp/entry_view_state.rs b/crates/agent_ui/src/acp/entry_view_state.rs new file mode 100644 index 0000000000000000000000000000000000000000..c310473259bc07468cd8a062e15dd8025645f90f --- /dev/null +++ b/crates/agent_ui/src/acp/entry_view_state.rs @@ -0,0 +1,477 @@ +use std::ops::Range; + +use acp_thread::{AcpThread, AgentThreadEntry}; +use agent_client_protocol::ToolCallId; +use agent2::HistoryStore; +use collections::HashMap; +use editor::{Editor, EditorMode, MinimapVisibility}; +use gpui::{ + AnyEntity, App, AppContext as _, Entity, EntityId, EventEmitter, Focusable, + TextStyleRefinement, WeakEntity, Window, +}; +use language::language_settings::SoftWrap; +use project::Project; +use prompt_store::PromptStore; +use settings::Settings as _; +use terminal_view::TerminalView; +use theme::ThemeSettings; +use ui::{Context, TextSize}; +use workspace::Workspace; + +use crate::acp::message_editor::{MessageEditor, MessageEditorEvent}; + +pub struct EntryViewState { + workspace: WeakEntity, + project: Entity, + history_store: Entity, + prompt_store: Option>, + entries: Vec, + prevent_slash_commands: bool, +} + +impl EntryViewState { + pub fn new( + workspace: WeakEntity, + project: Entity, + history_store: Entity, + prompt_store: Option>, + prevent_slash_commands: bool, + ) -> Self { + Self { + workspace, + project, + history_store, + prompt_store, + entries: Vec::new(), + prevent_slash_commands, + } + } + + pub fn entry(&self, index: usize) -> Option<&Entry> { + self.entries.get(index) + } + + pub fn sync_entry( + &mut self, + index: usize, + thread: &Entity, + window: &mut Window, + cx: &mut Context, + ) { + let Some(thread_entry) = thread.read(cx).entries().get(index) else { + return; + }; + + match thread_entry { + 
AgentThreadEntry::UserMessage(message) => { + let has_id = message.id.is_some(); + let chunks = message.chunks.clone(); + if let Some(Entry::UserMessage(editor)) = self.entries.get_mut(index) { + if !editor.focus_handle(cx).is_focused(window) { + // Only update if we are not editing. + // If we are, cancelling the edit will set the message to the newest content. + editor.update(cx, |editor, cx| { + editor.set_message(chunks, window, cx); + }); + } + } else { + let message_editor = cx.new(|cx| { + let mut editor = MessageEditor::new( + self.workspace.clone(), + self.project.clone(), + self.history_store.clone(), + self.prompt_store.clone(), + "Edit message - @ to include context", + self.prevent_slash_commands, + editor::EditorMode::AutoHeight { + min_lines: 1, + max_lines: None, + }, + window, + cx, + ); + if !has_id { + editor.set_read_only(true, cx); + } + editor.set_message(chunks, window, cx); + editor + }); + cx.subscribe(&message_editor, move |_, editor, event, cx| { + cx.emit(EntryViewEvent { + entry_index: index, + view_event: ViewEvent::MessageEditorEvent(editor, *event), + }) + }) + .detach(); + self.set_entry(index, Entry::UserMessage(message_editor)); + } + } + AgentThreadEntry::ToolCall(tool_call) => { + let id = tool_call.id.clone(); + let terminals = tool_call.terminals().cloned().collect::>(); + let diffs = tool_call.diffs().cloned().collect::>(); + + let views = if let Some(Entry::Content(views)) = self.entries.get_mut(index) { + views + } else { + self.set_entry(index, Entry::empty()); + let Some(Entry::Content(views)) = self.entries.get_mut(index) else { + unreachable!() + }; + views + }; + + for terminal in terminals { + views.entry(terminal.entity_id()).or_insert_with(|| { + let element = create_terminal( + self.workspace.clone(), + self.project.clone(), + terminal.clone(), + window, + cx, + ) + .into_any(); + cx.emit(EntryViewEvent { + entry_index: index, + view_event: ViewEvent::NewTerminal(id.clone()), + }); + element + }); + } + + for diff in diffs { + views.entry(diff.entity_id()).or_insert_with(|| { + let element = create_editor_diff(diff.clone(), window, cx).into_any(); + cx.emit(EntryViewEvent { + entry_index: index, + view_event: ViewEvent::NewDiff(id.clone()), + }); + element + }); + } + } + AgentThreadEntry::AssistantMessage(_) => { + if index == self.entries.len() { + self.entries.push(Entry::empty()) + } + } + }; + } + + fn set_entry(&mut self, index: usize, entry: Entry) { + if index == self.entries.len() { + self.entries.push(entry); + } else { + self.entries[index] = entry; + } + } + + pub fn remove(&mut self, range: Range) { + self.entries.drain(range); + } + + pub fn settings_changed(&mut self, cx: &mut App) { + for entry in self.entries.iter() { + match entry { + Entry::UserMessage { .. 
} => {} + Entry::Content(response_views) => { + for view in response_views.values() { + if let Ok(diff_editor) = view.clone().downcast::() { + diff_editor.update(cx, |diff_editor, cx| { + diff_editor.set_text_style_refinement( + diff_editor_text_style_refinement(cx), + ); + cx.notify(); + }) + } + } + } + } + } + } +} + +impl EventEmitter for EntryViewState {} + +pub struct EntryViewEvent { + pub entry_index: usize, + pub view_event: ViewEvent, +} + +pub enum ViewEvent { + NewDiff(ToolCallId), + NewTerminal(ToolCallId), + MessageEditorEvent(Entity, MessageEditorEvent), +} + +#[derive(Debug)] +pub enum Entry { + UserMessage(Entity), + Content(HashMap), +} + +impl Entry { + pub fn message_editor(&self) -> Option<&Entity> { + match self { + Self::UserMessage(editor) => Some(editor), + Entry::Content(_) => None, + } + } + + pub fn editor_for_diff(&self, diff: &Entity) -> Option> { + self.content_map()? + .get(&diff.entity_id()) + .cloned() + .map(|entity| entity.downcast::().unwrap()) + } + + pub fn terminal( + &self, + terminal: &Entity, + ) -> Option> { + self.content_map()? + .get(&terminal.entity_id()) + .cloned() + .map(|entity| entity.downcast::().unwrap()) + } + + fn content_map(&self) -> Option<&HashMap> { + match self { + Self::Content(map) => Some(map), + _ => None, + } + } + + fn empty() -> Self { + Self::Content(HashMap::default()) + } + + #[cfg(test)] + pub fn has_content(&self) -> bool { + match self { + Self::Content(map) => !map.is_empty(), + Self::UserMessage(_) => false, + } + } +} + +fn create_terminal( + workspace: WeakEntity, + project: Entity, + terminal: Entity, + window: &mut Window, + cx: &mut App, +) -> Entity { + cx.new(|cx| { + let mut view = TerminalView::new( + terminal.read(cx).inner().clone(), + workspace.clone(), + None, + project.downgrade(), + window, + cx, + ); + view.set_embedded_mode(Some(1000), cx); + view + }) +} + +fn create_editor_diff( + diff: Entity, + window: &mut Window, + cx: &mut App, +) -> Entity { + cx.new(|cx| { + let mut editor = Editor::new( + EditorMode::Full { + scale_ui_elements_with_buffer_font_size: false, + show_active_line_background: false, + sized_by_content: true, + }, + diff.read(cx).multibuffer().clone(), + None, + window, + cx, + ); + editor.set_show_gutter(false, cx); + editor.disable_inline_diagnostics(); + editor.disable_expand_excerpt_buttons(cx); + editor.set_show_vertical_scrollbar(false, cx); + editor.set_minimap_visibility(MinimapVisibility::Disabled, window, cx); + editor.set_soft_wrap_mode(SoftWrap::None, cx); + editor.scroll_manager.set_forbid_vertical_scroll(true); + editor.set_show_indent_guides(false, cx); + editor.set_read_only(true); + editor.set_show_breakpoints(false, cx); + editor.set_show_code_actions(false, cx); + editor.set_show_git_diff_gutter(false, cx); + editor.set_expand_all_diff_hunks(cx); + editor.set_text_style_refinement(diff_editor_text_style_refinement(cx)); + editor + }) +} + +fn diff_editor_text_style_refinement(cx: &mut App) -> TextStyleRefinement { + TextStyleRefinement { + font_size: Some( + TextSize::Small + .rems(cx) + .to_pixels(ThemeSettings::get_global(cx).agent_font_size(cx)) + .into(), + ), + ..Default::default() + } +} + +#[cfg(test)] +mod tests { + use std::{path::Path, rc::Rc}; + + use acp_thread::{AgentConnection, StubAgentConnection}; + use agent_client_protocol as acp; + use agent_settings::AgentSettings; + use agent2::HistoryStore; + use assistant_context::ContextStore; + use buffer_diff::{DiffHunkStatus, DiffHunkStatusKind}; + use editor::{EditorSettings, RowInfo}; + use 
fs::FakeFs; + use gpui::{AppContext as _, SemanticVersion, TestAppContext}; + + use crate::acp::entry_view_state::EntryViewState; + use multi_buffer::MultiBufferRow; + use pretty_assertions::assert_matches; + use project::Project; + use serde_json::json; + use settings::{Settings as _, SettingsStore}; + use theme::ThemeSettings; + use util::path; + use workspace::Workspace; + + #[gpui::test] + async fn test_diff_sync(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/project", + json!({ + "hello.txt": "hi world" + }), + ) + .await; + let project = Project::test(fs, [Path::new(path!("/project"))], cx).await; + + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + + let tool_call = acp::ToolCall { + id: acp::ToolCallId("tool".into()), + title: "Tool call".into(), + kind: acp::ToolKind::Other, + status: acp::ToolCallStatus::InProgress, + content: vec![acp::ToolCallContent::Diff { + diff: acp::Diff { + path: "/project/hello.txt".into(), + old_text: Some("hi world".into()), + new_text: "hello world".into(), + }, + }], + locations: vec![], + raw_input: None, + raw_output: None, + }; + let connection = Rc::new(StubAgentConnection::new()); + let thread = cx + .update(|_, cx| { + connection + .clone() + .new_thread(project.clone(), Path::new(path!("/project")), cx) + }) + .await + .unwrap(); + let session_id = thread.update(cx, |thread, _| thread.session_id().clone()); + + cx.update(|_, cx| { + connection.send_update(session_id, acp::SessionUpdate::ToolCall(tool_call), cx) + }); + + let context_store = cx.new(|cx| ContextStore::fake(project.clone(), cx)); + let history_store = cx.new(|cx| HistoryStore::new(context_store, cx)); + + let view_state = cx.new(|_cx| { + EntryViewState::new( + workspace.downgrade(), + project.clone(), + history_store, + None, + false, + ) + }); + + view_state.update_in(cx, |view_state, window, cx| { + view_state.sync_entry(0, &thread, window, cx) + }); + + let diff = thread.read_with(cx, |thread, _cx| { + thread + .entries() + .get(0) + .unwrap() + .diffs() + .next() + .unwrap() + .clone() + }); + + cx.run_until_parked(); + + let diff_editor = view_state.read_with(cx, |view_state, _cx| { + view_state.entry(0).unwrap().editor_for_diff(&diff).unwrap() + }); + assert_eq!( + diff_editor.read_with(cx, |editor, cx| editor.text(cx)), + "hi world\nhello world" + ); + let row_infos = diff_editor.read_with(cx, |editor, cx| { + let multibuffer = editor.buffer().read(cx); + multibuffer + .snapshot(cx) + .row_infos(MultiBufferRow(0)) + .collect::>() + }); + assert_matches!( + row_infos.as_slice(), + [ + RowInfo { + multibuffer_row: Some(MultiBufferRow(0)), + diff_status: Some(DiffHunkStatus { + kind: DiffHunkStatusKind::Deleted, + .. + }), + .. + }, + RowInfo { + multibuffer_row: Some(MultiBufferRow(1)), + diff_status: Some(DiffHunkStatus { + kind: DiffHunkStatusKind::Added, + .. + }), + .. 
+ } + ] + ); + } + + fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + language::init(cx); + Project::init_settings(cx); + AgentSettings::register(cx); + workspace::init_settings(cx); + ThemeSettings::register(cx); + release_channel::init(SemanticVersion::default(), cx); + EditorSettings::register(cx); + }); + } +} diff --git a/crates/agent_ui/src/acp/message_editor.rs b/crates/agent_ui/src/acp/message_editor.rs new file mode 100644 index 0000000000000000000000000000000000000000..dc31c5fe106d4bb16d5d710710b1db39a1a62631 --- /dev/null +++ b/crates/agent_ui/src/acp/message_editor.rs @@ -0,0 +1,2464 @@ +use crate::{ + acp::completion_provider::ContextPickerCompletionProvider, + context_picker::{ContextPickerAction, fetch_context_picker::fetch_url_content}, +}; +use acp_thread::{MentionUri, selection_name}; +use agent_client_protocol as acp; +use agent_servers::AgentServer; +use agent2::HistoryStore; +use anyhow::{Context as _, Result, anyhow}; +use assistant_slash_commands::codeblock_fence_for_path; +use collections::{HashMap, HashSet}; +use editor::{ + Addon, Anchor, AnchorRangeExt, ContextMenuOptions, ContextMenuPlacement, Editor, EditorElement, + EditorEvent, EditorMode, EditorStyle, ExcerptId, FoldPlaceholder, MultiBuffer, + SemanticsProvider, ToOffset, + actions::Paste, + display_map::{Crease, CreaseId, FoldId}, +}; +use futures::{ + FutureExt as _, TryFutureExt as _, + future::{Shared, join_all, try_join_all}, +}; +use gpui::{ + AppContext, ClipboardEntry, Context, Entity, EventEmitter, FocusHandle, Focusable, + HighlightStyle, Image, ImageFormat, Img, KeyContext, Subscription, Task, TextStyle, + UnderlineStyle, WeakEntity, +}; +use language::{Buffer, Language}; +use language_model::LanguageModelImage; +use project::{CompletionIntent, Project, ProjectItem, ProjectPath, Worktree}; +use prompt_store::PromptStore; +use rope::Point; +use settings::Settings; +use std::{ + cell::Cell, + ffi::OsStr, + fmt::Write, + ops::Range, + path::{Path, PathBuf}, + rc::Rc, + sync::Arc, + time::Duration, +}; +use text::{OffsetRangeExt, ToOffset as _}; +use theme::ThemeSettings; +use ui::{ + ActiveTheme, AnyElement, App, ButtonCommon, ButtonLike, ButtonStyle, Color, Icon, IconName, + IconSize, InteractiveElement, IntoElement, Label, LabelCommon, LabelSize, ParentElement, + Render, SelectableButton, SharedString, Styled, TextSize, TintColor, Toggleable, Window, div, + h_flex, px, +}; +use url::Url; +use util::ResultExt; +use workspace::{ + Toast, Workspace, + notifications::{NotificationId, NotifyResultExt as _}, +}; +use zed_actions::agent::Chat; + +const PARSE_SLASH_COMMAND_DEBOUNCE: Duration = Duration::from_millis(50); + +pub struct MessageEditor { + mention_set: MentionSet, + editor: Entity, + project: Entity, + workspace: WeakEntity, + history_store: Entity, + prompt_store: Option>, + prevent_slash_commands: bool, + prompt_capabilities: Rc>, + _subscriptions: Vec, + _parse_slash_command_task: Task<()>, +} + +#[derive(Clone, Copy, Debug)] +pub enum MessageEditorEvent { + Send, + Cancel, + Focus, +} + +impl EventEmitter for MessageEditor {} + +impl MessageEditor { + pub fn new( + workspace: WeakEntity, + project: Entity, + history_store: Entity, + prompt_store: Option>, + placeholder: impl Into>, + prevent_slash_commands: bool, + mode: EditorMode, + window: &mut Window, + cx: &mut Context, + ) -> Self { + let language = Language::new( + language::LanguageConfig { + completion_query_characters: 
HashSet::from_iter(['.', '-', '_', '@']), + ..Default::default() + }, + None, + ); + let prompt_capabilities = Rc::new(Cell::new(acp::PromptCapabilities::default())); + let completion_provider = ContextPickerCompletionProvider::new( + cx.weak_entity(), + workspace.clone(), + history_store.clone(), + prompt_store.clone(), + prompt_capabilities.clone(), + ); + let semantics_provider = Rc::new(SlashCommandSemanticsProvider { + range: Cell::new(None), + }); + let mention_set = MentionSet::default(); + let editor = cx.new(|cx| { + let buffer = cx.new(|cx| Buffer::local("", cx).with_language(Arc::new(language), cx)); + let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); + + let mut editor = Editor::new(mode, buffer, None, window, cx); + editor.set_placeholder_text(placeholder, cx); + editor.set_show_indent_guides(false, cx); + editor.set_soft_wrap(); + editor.set_use_modal_editing(true); + editor.set_completion_provider(Some(Rc::new(completion_provider))); + editor.set_context_menu_options(ContextMenuOptions { + min_entries_visible: 12, + max_entries_visible: 12, + placement: Some(ContextMenuPlacement::Above), + }); + if prevent_slash_commands { + editor.set_semantics_provider(Some(semantics_provider.clone())); + } + editor.register_addon(MessageEditorAddon::new()); + editor + }); + + cx.on_focus(&editor.focus_handle(cx), window, |_, _, cx| { + cx.emit(MessageEditorEvent::Focus) + }) + .detach(); + + let mut subscriptions = Vec::new(); + if prevent_slash_commands { + subscriptions.push(cx.subscribe_in(&editor, window, { + let semantics_provider = semantics_provider.clone(); + move |this, editor, event, window, cx| { + if let EditorEvent::Edited { .. } = event { + this.highlight_slash_command( + semantics_provider.clone(), + editor.clone(), + window, + cx, + ); + } + } + })); + } + + Self { + editor, + project, + mention_set, + workspace, + history_store, + prompt_store, + prevent_slash_commands, + prompt_capabilities, + _subscriptions: subscriptions, + _parse_slash_command_task: Task::ready(()), + } + } + + pub fn insert_thread_summary( + &mut self, + thread: agent2::DbThreadMetadata, + window: &mut Window, + cx: &mut Context, + ) { + let start = self.editor.update(cx, |editor, cx| { + editor.set_text(format!("{}\n", thread.title), window, cx); + editor + .buffer() + .read(cx) + .snapshot(cx) + .anchor_before(Point::zero()) + .text_anchor + }); + + self.confirm_completion( + thread.title.clone(), + start, + thread.title.len(), + MentionUri::Thread { + id: thread.id.clone(), + name: thread.title.to_string(), + }, + window, + cx, + ) + .detach(); + } + + pub fn set_prompt_capabilities(&mut self, capabilities: acp::PromptCapabilities) { + self.prompt_capabilities.set(capabilities); + } + + #[cfg(test)] + pub(crate) fn editor(&self) -> &Entity { + &self.editor + } + + #[cfg(test)] + pub(crate) fn mention_set(&mut self) -> &mut MentionSet { + &mut self.mention_set + } + + pub fn is_empty(&self, cx: &App) -> bool { + self.editor.read(cx).is_empty(cx) + } + + pub fn mentions(&self) -> HashSet { + self.mention_set + .uri_by_crease_id + .values() + .cloned() + .collect() + } + + pub fn confirm_completion( + &mut self, + crease_text: SharedString, + start: text::Anchor, + content_len: usize, + mention_uri: MentionUri, + window: &mut Window, + cx: &mut Context, + ) -> Task<()> { + let snapshot = self + .editor + .update(cx, |editor, cx| editor.snapshot(window, cx)); + let Some((excerpt_id, _, _)) = snapshot.buffer_snapshot.as_singleton() else { + return Task::ready(()); + }; + let 
Some(start_anchor) = snapshot + .buffer_snapshot + .anchor_in_excerpt(*excerpt_id, start) + else { + return Task::ready(()); + }; + + if let MentionUri::File { abs_path, .. } = &mention_uri { + let extension = abs_path + .extension() + .and_then(OsStr::to_str) + .unwrap_or_default(); + + if Img::extensions().contains(&extension) && !extension.contains("svg") { + if !self.prompt_capabilities.get().image { + struct ImagesNotAllowed; + + let end_anchor = snapshot.buffer_snapshot.anchor_before( + start_anchor.to_offset(&snapshot.buffer_snapshot) + content_len + 1, + ); + + self.editor.update(cx, |editor, cx| { + // Remove mention + editor.edit([((start_anchor..end_anchor), "")], cx); + }); + + self.workspace + .update(cx, |workspace, cx| { + workspace.show_toast( + Toast::new( + NotificationId::unique::(), + "This agent does not support images yet", + ) + .autohide(), + cx, + ); + }) + .ok(); + return Task::ready(()); + } + + let project = self.project.clone(); + let Some(project_path) = project + .read(cx) + .project_path_for_absolute_path(abs_path, cx) + else { + return Task::ready(()); + }; + let image = cx + .spawn(async move |_, cx| { + let image = project + .update(cx, |project, cx| project.open_image(project_path, cx)) + .map_err(|e| e.to_string())? + .await + .map_err(|e| e.to_string())?; + image + .read_with(cx, |image, _cx| image.image.clone()) + .map_err(|e| e.to_string()) + }) + .shared(); + let Some(crease_id) = insert_crease_for_image( + *excerpt_id, + start, + content_len, + Some(abs_path.as_path().into()), + image.clone(), + self.editor.clone(), + window, + cx, + ) else { + return Task::ready(()); + }; + return self.confirm_mention_for_image( + crease_id, + start_anchor, + Some(abs_path.clone()), + image, + window, + cx, + ); + } + } + + let Some(crease_id) = crate::context_picker::insert_crease_for_mention( + *excerpt_id, + start, + content_len, + crease_text, + mention_uri.icon_path(cx), + self.editor.clone(), + window, + cx, + ) else { + return Task::ready(()); + }; + + match mention_uri { + MentionUri::Fetch { url } => { + self.confirm_mention_for_fetch(crease_id, start_anchor, url, window, cx) + } + MentionUri::Directory { abs_path } => { + self.confirm_mention_for_directory(crease_id, start_anchor, abs_path, window, cx) + } + MentionUri::Thread { id, name } => { + self.confirm_mention_for_thread(crease_id, start_anchor, id, name, window, cx) + } + MentionUri::TextThread { path, name } => self.confirm_mention_for_text_thread( + crease_id, + start_anchor, + path, + name, + window, + cx, + ), + MentionUri::File { .. } + | MentionUri::Symbol { .. } + | MentionUri::Rule { .. } + | MentionUri::Selection { .. 
} => { + self.mention_set.insert_uri(crease_id, mention_uri.clone()); + Task::ready(()) + } + } + } + + fn confirm_mention_for_directory( + &mut self, + crease_id: CreaseId, + anchor: Anchor, + abs_path: PathBuf, + window: &mut Window, + cx: &mut Context, + ) -> Task<()> { + fn collect_files_in_path(worktree: &Worktree, path: &Path) -> Vec<(Arc, PathBuf)> { + let mut files = Vec::new(); + + for entry in worktree.child_entries(path) { + if entry.is_dir() { + files.extend(collect_files_in_path(worktree, &entry.path)); + } else if entry.is_file() { + files.push((entry.path.clone(), worktree.full_path(&entry.path))); + } + } + + files + } + + let uri = MentionUri::Directory { + abs_path: abs_path.clone(), + }; + let Some(project_path) = self + .project + .read(cx) + .project_path_for_absolute_path(&abs_path, cx) + else { + return Task::ready(()); + }; + let Some(entry) = self.project.read(cx).entry_for_path(&project_path, cx) else { + return Task::ready(()); + }; + let Some(worktree) = self.project.read(cx).worktree_for_entry(entry.id, cx) else { + return Task::ready(()); + }; + let project = self.project.clone(); + let task = cx.spawn(async move |_, cx| { + let directory_path = entry.path.clone(); + + let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id())?; + let file_paths = worktree.read_with(cx, |worktree, _cx| { + collect_files_in_path(worktree, &directory_path) + })?; + let descendants_future = cx.update(|cx| { + join_all(file_paths.into_iter().map(|(worktree_path, full_path)| { + let rel_path = worktree_path + .strip_prefix(&directory_path) + .log_err() + .map_or_else(|| worktree_path.clone(), |rel_path| rel_path.into()); + + let open_task = project.update(cx, |project, cx| { + project.buffer_store().update(cx, |buffer_store, cx| { + let project_path = ProjectPath { + worktree_id, + path: worktree_path, + }; + buffer_store.open_buffer(project_path, cx) + }) + }); + + // TODO: report load errors instead of just logging + let rope_task = cx.spawn(async move |cx| { + let buffer = open_task.await.log_err()?; + let rope = buffer + .read_with(cx, |buffer, _cx| buffer.as_rope().clone()) + .log_err()?; + Some((rope, buffer)) + }); + + cx.background_spawn(async move { + let (rope, buffer) = rope_task.await?; + Some((rel_path, full_path, rope.to_string(), buffer)) + }) + })) + })?; + + let contents = cx + .background_spawn(async move { + let (contents, tracked_buffers) = descendants_future + .await + .into_iter() + .flatten() + .map(|(rel_path, full_path, rope, buffer)| { + ((rel_path, full_path, rope), buffer) + }) + .unzip(); + (render_directory_contents(contents), tracked_buffers) + }) + .await; + anyhow::Ok(contents) + }); + let task = cx + .spawn(async move |_, _| task.await.map_err(|e| e.to_string())) + .shared(); + + self.mention_set + .directories + .insert(abs_path.clone(), task.clone()); + + let editor = self.editor.clone(); + cx.spawn_in(window, async move |this, cx| { + if task.await.notify_async_err(cx).is_some() { + this.update(cx, |this, _| { + this.mention_set.insert_uri(crease_id, uri); + }) + .ok(); + } else { + editor + .update(cx, |editor, cx| { + editor.display_map.update(cx, |display_map, cx| { + display_map.unfold_intersecting(vec![anchor..anchor], true, cx); + }); + editor.remove_creases([crease_id], cx); + }) + .ok(); + this.update(cx, |this, _cx| { + this.mention_set.directories.remove(&abs_path); + }) + .ok(); + } + }) + } + + fn confirm_mention_for_fetch( + &mut self, + crease_id: CreaseId, + anchor: Anchor, + url: url::Url, + window: &mut Window, + cx: 
&mut Context, + ) -> Task<()> { + let Some(http_client) = self + .workspace + .update(cx, |workspace, _cx| workspace.client().http_client()) + .ok() + else { + return Task::ready(()); + }; + + let url_string = url.to_string(); + let fetch = cx + .background_executor() + .spawn(async move { + fetch_url_content(http_client, url_string) + .map_err(|e| e.to_string()) + .await + }) + .shared(); + self.mention_set + .add_fetch_result(url.clone(), fetch.clone()); + + cx.spawn_in(window, async move |this, cx| { + let fetch = fetch.await.notify_async_err(cx); + this.update(cx, |this, cx| { + if fetch.is_some() { + this.mention_set + .insert_uri(crease_id, MentionUri::Fetch { url }); + } else { + // Remove crease if we failed to fetch + this.editor.update(cx, |editor, cx| { + editor.display_map.update(cx, |display_map, cx| { + display_map.unfold_intersecting(vec![anchor..anchor], true, cx); + }); + editor.remove_creases([crease_id], cx); + }); + this.mention_set.fetch_results.remove(&url); + } + }) + .ok(); + }) + } + + pub fn confirm_mention_for_selection( + &mut self, + source_range: Range, + selections: Vec<(Entity, Range, Range)>, + window: &mut Window, + cx: &mut Context, + ) { + let snapshot = self.editor.read(cx).buffer().read(cx).snapshot(cx); + let Some((&excerpt_id, _, _)) = snapshot.as_singleton() else { + return; + }; + let Some(start) = snapshot.anchor_in_excerpt(excerpt_id, source_range.start) else { + return; + }; + + let offset = start.to_offset(&snapshot); + + for (buffer, selection_range, range_to_fold) in selections { + let range = snapshot.anchor_after(offset + range_to_fold.start) + ..snapshot.anchor_after(offset + range_to_fold.end); + + // TODO support selections from buffers with no path + let Some(project_path) = buffer.read(cx).project_path(cx) else { + continue; + }; + let Some(abs_path) = self.project.read(cx).absolute_path(&project_path, cx) else { + continue; + }; + let snapshot = buffer.read(cx).snapshot(); + + let point_range = selection_range.to_point(&snapshot); + let line_range = point_range.start.row..point_range.end.row; + + let uri = MentionUri::Selection { + path: abs_path.clone(), + line_range: line_range.clone(), + }; + let crease = crate::context_picker::crease_for_mention( + selection_name(&abs_path, &line_range).into(), + uri.icon_path(cx), + range, + self.editor.downgrade(), + ); + + let crease_id = self.editor.update(cx, |editor, cx| { + let crease_ids = editor.insert_creases(vec![crease.clone()], cx); + editor.fold_creases(vec![crease], false, window, cx); + crease_ids.first().copied().unwrap() + }); + + self.mention_set.insert_uri(crease_id, uri); + } + } + + fn confirm_mention_for_thread( + &mut self, + crease_id: CreaseId, + anchor: Anchor, + id: acp::SessionId, + name: String, + window: &mut Window, + cx: &mut Context, + ) -> Task<()> { + let uri = MentionUri::Thread { + id: id.clone(), + name, + }; + let server = Rc::new(agent2::NativeAgentServer::new( + self.project.read(cx).fs().clone(), + self.history_store.clone(), + )); + let connection = server.connect(Path::new(""), &self.project, cx); + let load_summary = cx.spawn({ + let id = id.clone(); + async move |_, cx| { + let agent = connection.await?; + let agent = agent.downcast::().unwrap(); + let summary = agent + .0 + .update(cx, |agent, cx| agent.thread_summary(id, cx))? 
+ .await?; + anyhow::Ok(summary) + } + }); + let task = cx + .spawn(async move |_, _| load_summary.await.map_err(|e| format!("{e}"))) + .shared(); + + self.mention_set.insert_thread(id.clone(), task.clone()); + self.mention_set.insert_uri(crease_id, uri); + + let editor = self.editor.clone(); + cx.spawn_in(window, async move |this, cx| { + if task.await.notify_async_err(cx).is_none() { + editor + .update(cx, |editor, cx| { + editor.display_map.update(cx, |display_map, cx| { + display_map.unfold_intersecting(vec![anchor..anchor], true, cx); + }); + editor.remove_creases([crease_id], cx); + }) + .ok(); + this.update(cx, |this, _| { + this.mention_set.thread_summaries.remove(&id); + this.mention_set.uri_by_crease_id.remove(&crease_id); + }) + .ok(); + } + }) + } + + fn confirm_mention_for_text_thread( + &mut self, + crease_id: CreaseId, + anchor: Anchor, + path: PathBuf, + name: String, + window: &mut Window, + cx: &mut Context, + ) -> Task<()> { + let uri = MentionUri::TextThread { + path: path.clone(), + name, + }; + let context = self.history_store.update(cx, |text_thread_store, cx| { + text_thread_store.load_text_thread(path.as_path().into(), cx) + }); + let task = cx + .spawn(async move |_, cx| { + let context = context.await.map_err(|e| e.to_string())?; + let xml = context + .update(cx, |context, cx| context.to_xml(cx)) + .map_err(|e| e.to_string())?; + Ok(xml) + }) + .shared(); + + self.mention_set + .insert_text_thread(path.clone(), task.clone()); + + let editor = self.editor.clone(); + cx.spawn_in(window, async move |this, cx| { + if task.await.notify_async_err(cx).is_some() { + this.update(cx, |this, _| { + this.mention_set.insert_uri(crease_id, uri); + }) + .ok(); + } else { + editor + .update(cx, |editor, cx| { + editor.display_map.update(cx, |display_map, cx| { + display_map.unfold_intersecting(vec![anchor..anchor], true, cx); + }); + editor.remove_creases([crease_id], cx); + }) + .ok(); + this.update(cx, |this, _| { + this.mention_set.text_thread_summaries.remove(&path); + }) + .ok(); + } + }) + } + + pub fn contents( + &self, + window: &mut Window, + cx: &mut Context, + ) -> Task, Vec>)>> { + let contents = self.mention_set.contents( + &self.project, + self.prompt_store.as_ref(), + &self.prompt_capabilities.get(), + window, + cx, + ); + let editor = self.editor.clone(); + let prevent_slash_commands = self.prevent_slash_commands; + + cx.spawn(async move |_, cx| { + let contents = contents.await?; + let mut all_tracked_buffers = Vec::new(); + + editor.update(cx, |editor, cx| { + let mut ix = 0; + let mut chunks: Vec = Vec::new(); + let text = editor.text(cx); + editor.display_map.update(cx, |map, cx| { + let snapshot = map.snapshot(cx); + for (crease_id, crease) in snapshot.crease_snapshot.creases() { + // Skip creases that have been edited out of the message buffer. 
+ if !crease.range().start.is_valid(&snapshot.buffer_snapshot) { + continue; + } + + let Some(mention) = contents.get(&crease_id) else { + continue; + }; + + let crease_range = crease.range().to_offset(&snapshot.buffer_snapshot); + if crease_range.start > ix { + let chunk = if prevent_slash_commands + && ix == 0 + && parse_slash_command(&text[ix..]).is_some() + { + format!(" {}", &text[ix..crease_range.start]).into() + } else { + text[ix..crease_range.start].into() + }; + chunks.push(chunk); + } + let chunk = match mention { + Mention::Text { + uri, + content, + tracked_buffers, + } => { + all_tracked_buffers.extend(tracked_buffers.iter().cloned()); + acp::ContentBlock::Resource(acp::EmbeddedResource { + annotations: None, + resource: acp::EmbeddedResourceResource::TextResourceContents( + acp::TextResourceContents { + mime_type: None, + text: content.clone(), + uri: uri.to_uri().to_string(), + }, + ), + }) + } + Mention::Image(mention_image) => { + acp::ContentBlock::Image(acp::ImageContent { + annotations: None, + data: mention_image.data.to_string(), + mime_type: mention_image.format.mime_type().into(), + uri: mention_image + .abs_path + .as_ref() + .map(|path| format!("file://{}", path.display())), + }) + } + Mention::UriOnly(uri) => { + acp::ContentBlock::ResourceLink(acp::ResourceLink { + name: uri.name(), + uri: uri.to_uri().to_string(), + annotations: None, + description: None, + mime_type: None, + size: None, + title: None, + }) + } + }; + chunks.push(chunk); + ix = crease_range.end; + } + + if ix < text.len() { + let last_chunk = if prevent_slash_commands + && ix == 0 + && parse_slash_command(&text[ix..]).is_some() + { + format!(" {}", text[ix..].trim_end()) + } else { + text[ix..].trim_end().to_owned() + }; + if !last_chunk.is_empty() { + chunks.push(last_chunk.into()); + } + } + }); + + (chunks, all_tracked_buffers) + }) + }) + } + + pub fn clear(&mut self, window: &mut Window, cx: &mut Context) { + self.editor.update(cx, |editor, cx| { + editor.clear(window, cx); + editor.remove_creases(self.mention_set.drain(), cx) + }); + } + + fn send(&mut self, _: &Chat, _: &mut Window, cx: &mut Context) { + if self.is_empty(cx) { + return; + } + cx.emit(MessageEditorEvent::Send) + } + + fn cancel(&mut self, _: &editor::actions::Cancel, _: &mut Window, cx: &mut Context) { + cx.emit(MessageEditorEvent::Cancel) + } + + fn paste(&mut self, _: &Paste, window: &mut Window, cx: &mut Context) { + if !self.prompt_capabilities.get().image { + return; + } + + let images = cx + .read_from_clipboard() + .map(|item| { + item.into_entries() + .filter_map(|entry| { + if let ClipboardEntry::Image(image) = entry { + Some(image) + } else { + None + } + }) + .collect::>() + }) + .unwrap_or_default(); + + if images.is_empty() { + return; + } + cx.stop_propagation(); + + let replacement_text = "image"; + for image in images { + let (excerpt_id, text_anchor, multibuffer_anchor) = + self.editor.update(cx, |message_editor, cx| { + let snapshot = message_editor.snapshot(window, cx); + let (excerpt_id, _, buffer_snapshot) = + snapshot.buffer_snapshot.as_singleton().unwrap(); + + let text_anchor = buffer_snapshot.anchor_before(buffer_snapshot.len()); + let multibuffer_anchor = snapshot + .buffer_snapshot + .anchor_in_excerpt(*excerpt_id, text_anchor); + message_editor.edit( + [( + multi_buffer::Anchor::max()..multi_buffer::Anchor::max(), + format!("{replacement_text} "), + )], + cx, + ); + (*excerpt_id, text_anchor, multibuffer_anchor) + }); + + let content_len = replacement_text.len(); + let Some(anchor) = 
multibuffer_anchor else { + return; + }; + let task = Task::ready(Ok(Arc::new(image))).shared(); + let Some(crease_id) = insert_crease_for_image( + excerpt_id, + text_anchor, + content_len, + None.clone(), + task.clone(), + self.editor.clone(), + window, + cx, + ) else { + return; + }; + self.confirm_mention_for_image(crease_id, anchor, None, task, window, cx) + .detach(); + } + } + + pub fn insert_dragged_files( + &mut self, + paths: Vec, + added_worktrees: Vec>, + window: &mut Window, + cx: &mut Context, + ) { + let buffer = self.editor.read(cx).buffer().clone(); + let Some(buffer) = buffer.read(cx).as_singleton() else { + return; + }; + let mut tasks = Vec::new(); + for path in paths { + let Some(entry) = self.project.read(cx).entry_for_path(&path, cx) else { + continue; + }; + let Some(abs_path) = self.project.read(cx).absolute_path(&path, cx) else { + continue; + }; + let path_prefix = abs_path + .file_name() + .unwrap_or(path.path.as_os_str()) + .display() + .to_string(); + let (file_name, _) = + crate::context_picker::file_context_picker::extract_file_name_and_directory( + &path.path, + &path_prefix, + ); + + let uri = if entry.is_dir() { + MentionUri::Directory { abs_path } + } else { + MentionUri::File { abs_path } + }; + + let new_text = format!("{} ", uri.as_link()); + let content_len = new_text.len() - 1; + + let anchor = buffer.update(cx, |buffer, _cx| buffer.anchor_before(buffer.len())); + + self.editor.update(cx, |message_editor, cx| { + message_editor.edit( + [( + multi_buffer::Anchor::max()..multi_buffer::Anchor::max(), + new_text, + )], + cx, + ); + }); + tasks.push(self.confirm_completion(file_name, anchor, content_len, uri, window, cx)); + } + cx.spawn(async move |_, _| { + join_all(tasks).await; + drop(added_worktrees); + }) + .detach(); + } + + pub fn insert_selections(&mut self, window: &mut Window, cx: &mut Context) { + let buffer = self.editor.read(cx).buffer().clone(); + let Some(buffer) = buffer.read(cx).as_singleton() else { + return; + }; + let anchor = buffer.update(cx, |buffer, _cx| buffer.anchor_before(buffer.len())); + let Some(workspace) = self.workspace.upgrade() else { + return; + }; + let Some(completion) = ContextPickerCompletionProvider::completion_for_action( + ContextPickerAction::AddSelections, + anchor..anchor, + cx.weak_entity(), + &workspace, + cx, + ) else { + return; + }; + self.editor.update(cx, |message_editor, cx| { + message_editor.edit( + [( + multi_buffer::Anchor::max()..multi_buffer::Anchor::max(), + completion.new_text, + )], + cx, + ); + }); + if let Some(confirm) = completion.confirm { + confirm(CompletionIntent::Complete, window, cx); + } + } + + pub fn set_read_only(&mut self, read_only: bool, cx: &mut Context) { + self.editor.update(cx, |message_editor, cx| { + message_editor.set_read_only(read_only); + cx.notify() + }) + } + + fn confirm_mention_for_image( + &mut self, + crease_id: CreaseId, + anchor: Anchor, + abs_path: Option, + image: Shared, String>>>, + window: &mut Window, + cx: &mut Context, + ) -> Task<()> { + let editor = self.editor.clone(); + let task = cx + .spawn_in(window, { + let abs_path = abs_path.clone(); + async move |_, cx| { + let image = image.await?; + let format = image.format; + let image = cx + .update(|_, cx| LanguageModelImage::from_image(image, cx)) + .map_err(|e| e.to_string())? 
+ .await; + if let Some(image) = image { + Ok(MentionImage { + abs_path, + data: image.source, + format, + }) + } else { + Err("Failed to convert image".into()) + } + } + }) + .shared(); + + self.mention_set.insert_image(crease_id, task.clone()); + + cx.spawn_in(window, async move |this, cx| { + if task.await.notify_async_err(cx).is_some() { + if let Some(abs_path) = abs_path.clone() { + this.update(cx, |this, _cx| { + this.mention_set + .insert_uri(crease_id, MentionUri::File { abs_path }); + }) + .ok(); + } + } else { + editor + .update(cx, |editor, cx| { + editor.display_map.update(cx, |display_map, cx| { + display_map.unfold_intersecting(vec![anchor..anchor], true, cx); + }); + editor.remove_creases([crease_id], cx); + }) + .ok(); + this.update(cx, |this, _cx| { + this.mention_set.images.remove(&crease_id); + }) + .ok(); + } + }) + } + + pub fn set_mode(&mut self, mode: EditorMode, cx: &mut Context) { + self.editor.update(cx, |editor, cx| { + editor.set_mode(mode); + cx.notify() + }); + } + + pub fn set_message( + &mut self, + message: Vec, + window: &mut Window, + cx: &mut Context, + ) { + self.clear(window, cx); + + let mut text = String::new(); + let mut mentions = Vec::new(); + let mut images = Vec::new(); + + for chunk in message { + match chunk { + acp::ContentBlock::Text(text_content) => { + text.push_str(&text_content.text); + } + acp::ContentBlock::Resource(acp::EmbeddedResource { + resource: acp::EmbeddedResourceResource::TextResourceContents(resource), + .. + }) => { + if let Some(mention_uri) = MentionUri::parse(&resource.uri).log_err() { + let start = text.len(); + write!(&mut text, "{}", mention_uri.as_link()).ok(); + let end = text.len(); + mentions.push((start..end, mention_uri, resource.text)); + } + } + acp::ContentBlock::Image(content) => { + let start = text.len(); + text.push_str("image"); + let end = text.len(); + images.push((start..end, content)); + } + acp::ContentBlock::Audio(_) + | acp::ContentBlock::Resource(_) + | acp::ContentBlock::ResourceLink(_) => {} + } + } + + let snapshot = self.editor.update(cx, |editor, cx| { + editor.set_text(text, window, cx); + editor.buffer().read(cx).snapshot(cx) + }); + + for (range, mention_uri, text) in mentions { + let anchor = snapshot.anchor_before(range.start); + let crease_id = crate::context_picker::insert_crease_for_mention( + anchor.excerpt_id, + anchor.text_anchor, + range.end - range.start, + mention_uri.name().into(), + mention_uri.icon_path(cx), + self.editor.clone(), + window, + cx, + ); + + if let Some(crease_id) = crease_id { + self.mention_set.insert_uri(crease_id, mention_uri.clone()); + } + + match mention_uri { + MentionUri::Thread { id, .. } => { + self.mention_set + .insert_thread(id, Task::ready(Ok(text.into())).shared()); + } + MentionUri::TextThread { path, .. } => { + self.mention_set + .insert_text_thread(path, Task::ready(Ok(text)).shared()); + } + MentionUri::Fetch { url } => { + self.mention_set + .add_fetch_result(url, Task::ready(Ok(text)).shared()); + } + MentionUri::Directory { abs_path } => { + let task = Task::ready(Ok((text, Vec::new()))).shared(); + self.mention_set.directories.insert(abs_path, task); + } + MentionUri::File { .. } + | MentionUri::Symbol { .. } + | MentionUri::Rule { .. } + | MentionUri::Selection { .. 
} => {} + } + } + for (range, content) in images { + let Some(format) = ImageFormat::from_mime_type(&content.mime_type) else { + continue; + }; + let anchor = snapshot.anchor_before(range.start); + let abs_path = content + .uri + .as_ref() + .and_then(|uri| uri.strip_prefix("file://").map(|s| Path::new(s).into())); + + let name = content + .uri + .as_ref() + .and_then(|uri| { + uri.strip_prefix("file://") + .and_then(|path| Path::new(path).file_name()) + }) + .map(|name| name.to_string_lossy().to_string()) + .unwrap_or("Image".to_owned()); + let crease_id = crate::context_picker::insert_crease_for_mention( + anchor.excerpt_id, + anchor.text_anchor, + range.end - range.start, + name.into(), + IconName::Image.path().into(), + self.editor.clone(), + window, + cx, + ); + let data: SharedString = content.data.to_string().into(); + + if let Some(crease_id) = crease_id { + self.mention_set.insert_image( + crease_id, + Task::ready(Ok(MentionImage { + abs_path, + data, + format, + })) + .shared(), + ); + } + } + cx.notify(); + } + + fn highlight_slash_command( + &mut self, + semantics_provider: Rc, + editor: Entity, + window: &mut Window, + cx: &mut Context, + ) { + struct InvalidSlashCommand; + + self._parse_slash_command_task = cx.spawn_in(window, async move |_, cx| { + cx.background_executor() + .timer(PARSE_SLASH_COMMAND_DEBOUNCE) + .await; + editor + .update_in(cx, |editor, window, cx| { + let snapshot = editor.snapshot(window, cx); + let range = parse_slash_command(&editor.text(cx)); + semantics_provider.range.set(range); + if let Some((start, end)) = range { + editor.highlight_text::( + vec![ + snapshot.buffer_snapshot.anchor_after(start) + ..snapshot.buffer_snapshot.anchor_before(end), + ], + HighlightStyle { + underline: Some(UnderlineStyle { + thickness: px(1.), + color: Some(gpui::red()), + wavy: true, + }), + ..Default::default() + }, + cx, + ); + } else { + editor.clear_highlights::(cx); + } + }) + .ok(); + }) + } + + #[cfg(test)] + pub fn set_text(&mut self, text: &str, window: &mut Window, cx: &mut Context) { + self.editor.update(cx, |editor, cx| { + editor.set_text(text, window, cx); + }); + } + + #[cfg(test)] + pub fn text(&self, cx: &App) -> String { + self.editor.read(cx).text(cx) + } +} + +fn render_directory_contents(entries: Vec<(Arc, PathBuf, String)>) -> String { + let mut output = String::new(); + for (_relative_path, full_path, content) in entries { + let fence = codeblock_fence_for_path(Some(&full_path), None); + write!(output, "\n{fence}\n{content}\n```").unwrap(); + } + output +} + +impl Focusable for MessageEditor { + fn focus_handle(&self, cx: &App) -> FocusHandle { + self.editor.focus_handle(cx) + } +} + +impl Render for MessageEditor { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + div() + .key_context("MessageEditor") + .on_action(cx.listener(Self::send)) + .on_action(cx.listener(Self::cancel)) + .capture_action(cx.listener(Self::paste)) + .flex_1() + .child({ + let settings = ThemeSettings::get_global(cx); + let font_size = TextSize::Small + .rems(cx) + .to_pixels(settings.agent_font_size(cx)); + let line_height = settings.buffer_line_height.value() * font_size; + + let text_style = TextStyle { + color: cx.theme().colors().text, + font_family: settings.buffer_font.family.clone(), + font_fallbacks: settings.buffer_font.fallbacks.clone(), + font_features: settings.buffer_font.features.clone(), + font_size: font_size.into(), + line_height: line_height.into(), + ..Default::default() + }; + + EditorElement::new( + &self.editor, + 
EditorStyle { + background: cx.theme().colors().editor_background, + local_player: cx.theme().players().local(), + text: text_style, + syntax: cx.theme().syntax().clone(), + ..Default::default() + }, + ) + }) + } +} + +pub(crate) fn insert_crease_for_image( + excerpt_id: ExcerptId, + anchor: text::Anchor, + content_len: usize, + abs_path: Option>, + image: Shared, String>>>, + editor: Entity, + window: &mut Window, + cx: &mut App, +) -> Option { + let crease_label = abs_path + .as_ref() + .and_then(|path| path.file_name()) + .map(|name| name.to_string_lossy().to_string().into()) + .unwrap_or(SharedString::from("Image")); + + editor.update(cx, |editor, cx| { + let snapshot = editor.buffer().read(cx).snapshot(cx); + + let start = snapshot.anchor_in_excerpt(excerpt_id, anchor)?; + + let start = start.bias_right(&snapshot); + let end = snapshot.anchor_before(start.to_offset(&snapshot) + content_len); + + let placeholder = FoldPlaceholder { + render: render_image_fold_icon_button(crease_label, image, cx.weak_entity()), + merge_adjacent: false, + ..Default::default() + }; + + let crease = Crease::Inline { + range: start..end, + placeholder, + render_toggle: None, + render_trailer: None, + metadata: None, + }; + + let ids = editor.insert_creases(vec![crease.clone()], cx); + editor.fold_creases(vec![crease], false, window, cx); + + Some(ids[0]) + }) +} + +fn render_image_fold_icon_button( + label: SharedString, + image_task: Shared, String>>>, + editor: WeakEntity, +) -> Arc, &mut App) -> AnyElement> { + Arc::new({ + move |fold_id, fold_range, cx| { + let is_in_text_selection = editor + .update(cx, |editor, cx| editor.is_range_selected(&fold_range, cx)) + .unwrap_or_default(); + + ButtonLike::new(fold_id) + .style(ButtonStyle::Filled) + .selected_style(ButtonStyle::Tinted(TintColor::Accent)) + .toggle_state(is_in_text_selection) + .child( + h_flex() + .gap_1() + .child( + Icon::new(IconName::Image) + .size(IconSize::XSmall) + .color(Color::Muted), + ) + .child( + Label::new(label.clone()) + .size(LabelSize::Small) + .buffer_font(cx) + .single_line(), + ), + ) + .hoverable_tooltip({ + let image_task = image_task.clone(); + move |_, cx| { + let image = image_task.peek().cloned().transpose().ok().flatten(); + let image_task = image_task.clone(); + cx.new::(|cx| ImageHover { + image, + _task: cx.spawn(async move |this, cx| { + if let Ok(image) = image_task.clone().await { + this.update(cx, |this, cx| { + if this.image.replace(image).is_none() { + cx.notify(); + } + }) + .ok(); + } + }), + }) + .into() + } + }) + .into_any_element() + } + }) +} + +struct ImageHover { + image: Option>, + _task: Task<()>, +} + +impl Render for ImageHover { + fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { + if let Some(image) = self.image.clone() { + gpui::img(image).max_w_96().max_h_96().into_any_element() + } else { + gpui::Empty.into_any_element() + } + } +} + +#[derive(Debug, Eq, PartialEq)] +pub enum Mention { + Text { + uri: MentionUri, + content: String, + tracked_buffers: Vec>, + }, + Image(MentionImage), + UriOnly(MentionUri), +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct MentionImage { + pub abs_path: Option, + pub data: SharedString, + pub format: ImageFormat, +} + +#[derive(Default)] +pub struct MentionSet { + uri_by_crease_id: HashMap, + fetch_results: HashMap>>>, + images: HashMap>>>, + thread_summaries: HashMap>>>, + text_thread_summaries: HashMap>>>, + directories: HashMap>), String>>>>, +} + +impl MentionSet { + pub fn insert_uri(&mut self, crease_id: 
CreaseId, uri: MentionUri) { + self.uri_by_crease_id.insert(crease_id, uri); + } + + pub fn add_fetch_result(&mut self, url: Url, content: Shared>>) { + self.fetch_results.insert(url, content); + } + + pub fn insert_image( + &mut self, + crease_id: CreaseId, + task: Shared>>, + ) { + self.images.insert(crease_id, task); + } + + fn insert_thread( + &mut self, + id: acp::SessionId, + task: Shared>>, + ) { + self.thread_summaries.insert(id, task); + } + + fn insert_text_thread(&mut self, path: PathBuf, task: Shared>>) { + self.text_thread_summaries.insert(path, task); + } + + pub fn drain(&mut self) -> impl Iterator { + self.fetch_results.clear(); + self.thread_summaries.clear(); + self.text_thread_summaries.clear(); + self.directories.clear(); + self.uri_by_crease_id + .drain() + .map(|(id, _)| id) + .chain(self.images.drain().map(|(id, _)| id)) + } + + pub fn contents( + &self, + project: &Entity, + prompt_store: Option<&Entity>, + prompt_capabilities: &acp::PromptCapabilities, + _window: &mut Window, + cx: &mut App, + ) -> Task>> { + if !prompt_capabilities.embedded_context { + let mentions = self + .uri_by_crease_id + .iter() + .map(|(crease_id, uri)| (*crease_id, Mention::UriOnly(uri.clone()))) + .collect(); + + return Task::ready(Ok(mentions)); + } + + let mut processed_image_creases = HashSet::default(); + + let mut contents = self + .uri_by_crease_id + .iter() + .map(|(&crease_id, uri)| { + match uri { + MentionUri::File { abs_path, .. } => { + let uri = uri.clone(); + let abs_path = abs_path.to_path_buf(); + + if let Some(task) = self.images.get(&crease_id).cloned() { + processed_image_creases.insert(crease_id); + return cx.spawn(async move |_| { + let image = task.await.map_err(|e| anyhow!("{e}"))?; + anyhow::Ok((crease_id, Mention::Image(image))) + }); + } + + let buffer_task = project.update(cx, |project, cx| { + let path = project + .find_project_path(abs_path, cx) + .context("Failed to find project path")?; + anyhow::Ok(project.open_buffer(path, cx)) + }); + cx.spawn(async move |cx| { + let buffer = buffer_task?.await?; + let content = buffer.read_with(cx, |buffer, _cx| buffer.text())?; + + anyhow::Ok(( + crease_id, + Mention::Text { + uri, + content, + tracked_buffers: vec![buffer], + }, + )) + }) + } + MentionUri::Directory { abs_path } => { + let Some(content) = self.directories.get(abs_path).cloned() else { + return Task::ready(Err(anyhow!("missing directory load task"))); + }; + let uri = uri.clone(); + cx.spawn(async move |_| { + let (content, tracked_buffers) = + content.await.map_err(|e| anyhow::anyhow!("{e}"))?; + Ok(( + crease_id, + Mention::Text { + uri, + content, + tracked_buffers, + }, + )) + }) + } + MentionUri::Symbol { + path, line_range, .. + } + | MentionUri::Selection { + path, line_range, .. + } => { + let uri = uri.clone(); + let path_buf = path.clone(); + let line_range = line_range.clone(); + + let buffer_task = project.update(cx, |project, cx| { + let path = project + .find_project_path(&path_buf, cx) + .context("Failed to find project path")?; + anyhow::Ok(project.open_buffer(path, cx)) + }); + + cx.spawn(async move |cx| { + let buffer = buffer_task?.await?; + let content = buffer.read_with(cx, |buffer, _cx| { + buffer + .text_for_range( + Point::new(line_range.start, 0) + ..Point::new( + line_range.end, + buffer.line_len(line_range.end), + ), + ) + .collect() + })?; + + anyhow::Ok(( + crease_id, + Mention::Text { + uri, + content, + tracked_buffers: vec![buffer], + }, + )) + }) + } + MentionUri::Thread { id, .. 
} => { + let Some(content) = self.thread_summaries.get(id).cloned() else { + return Task::ready(Err(anyhow!("missing thread summary"))); + }; + let uri = uri.clone(); + cx.spawn(async move |_| { + Ok(( + crease_id, + Mention::Text { + uri, + content: content + .await + .map_err(|e| anyhow::anyhow!("{e}"))? + .to_string(), + tracked_buffers: Vec::new(), + }, + )) + }) + } + MentionUri::TextThread { path, .. } => { + let Some(content) = self.text_thread_summaries.get(path).cloned() else { + return Task::ready(Err(anyhow!("missing text thread summary"))); + }; + let uri = uri.clone(); + cx.spawn(async move |_| { + Ok(( + crease_id, + Mention::Text { + uri, + content: content.await.map_err(|e| anyhow::anyhow!("{e}"))?, + tracked_buffers: Vec::new(), + }, + )) + }) + } + MentionUri::Rule { id: prompt_id, .. } => { + let Some(prompt_store) = prompt_store else { + return Task::ready(Err(anyhow!("missing prompt store"))); + }; + let text_task = prompt_store.read(cx).load(*prompt_id, cx); + let uri = uri.clone(); + cx.spawn(async move |_| { + // TODO: report load errors instead of just logging + let text = text_task.await?; + anyhow::Ok(( + crease_id, + Mention::Text { + uri, + content: text, + tracked_buffers: Vec::new(), + }, + )) + }) + } + MentionUri::Fetch { url } => { + let Some(content) = self.fetch_results.get(url).cloned() else { + return Task::ready(Err(anyhow!("missing fetch result"))); + }; + let uri = uri.clone(); + cx.spawn(async move |_| { + Ok(( + crease_id, + Mention::Text { + uri, + content: content.await.map_err(|e| anyhow::anyhow!("{e}"))?, + tracked_buffers: Vec::new(), + }, + )) + }) + } + } + }) + .collect::>(); + + // Handle images that didn't have a mention URI (because they were added by the paste handler). + contents.extend(self.images.iter().filter_map(|(crease_id, image)| { + if processed_image_creases.contains(crease_id) { + return None; + } + let crease_id = *crease_id; + let image = image.clone(); + Some(cx.spawn(async move |_| { + Ok(( + crease_id, + Mention::Image(image.await.map_err(|e| anyhow::anyhow!("{e}"))?), + )) + })) + })); + + cx.spawn(async move |_cx| { + let contents = try_join_all(contents).await?.into_iter().collect(); + anyhow::Ok(contents) + }) + } +} + +struct SlashCommandSemanticsProvider { + range: Cell>, +} + +impl SemanticsProvider for SlashCommandSemanticsProvider { + fn hover( + &self, + buffer: &Entity, + position: text::Anchor, + cx: &mut App, + ) -> Option>>> { + let snapshot = buffer.read(cx).snapshot(); + let offset = position.to_offset(&snapshot); + let (start, end) = self.range.get()?; + if !(start..end).contains(&offset) { + return None; + } + let range = snapshot.anchor_after(start)..snapshot.anchor_after(end); + Some(Task::ready(Some(vec![project::Hover { + contents: vec![project::HoverBlock { + text: "Slash commands are not supported".into(), + kind: project::HoverBlockKind::PlainText, + }], + range: Some(range), + language: None, + }]))) + } + + fn inline_values( + &self, + _buffer_handle: Entity, + _range: Range, + _cx: &mut App, + ) -> Option>>> { + None + } + + fn inlay_hints( + &self, + _buffer_handle: Entity, + _range: Range, + _cx: &mut App, + ) -> Option>>> { + None + } + + fn resolve_inlay_hint( + &self, + _hint: project::InlayHint, + _buffer_handle: Entity, + _server_id: lsp::LanguageServerId, + _cx: &mut App, + ) -> Option>> { + None + } + + fn supports_inlay_hints(&self, _buffer: &Entity, _cx: &mut App) -> bool { + false + } + + fn document_highlights( + &self, + _buffer: &Entity, + _position: text::Anchor, + _cx: &mut 
App, + ) -> Option>>> { + None + } + + fn definitions( + &self, + _buffer: &Entity, + _position: text::Anchor, + _kind: editor::GotoDefinitionKind, + _cx: &mut App, + ) -> Option>>>> { + None + } + + fn range_for_rename( + &self, + _buffer: &Entity, + _position: text::Anchor, + _cx: &mut App, + ) -> Option>>>> { + None + } + + fn perform_rename( + &self, + _buffer: &Entity, + _position: text::Anchor, + _new_name: String, + _cx: &mut App, + ) -> Option>> { + None + } +} + +fn parse_slash_command(text: &str) -> Option<(usize, usize)> { + if let Some(remainder) = text.strip_prefix('/') { + let pos = remainder + .find(char::is_whitespace) + .unwrap_or(remainder.len()); + let command = &remainder[..pos]; + if !command.is_empty() && command.chars().all(char::is_alphanumeric) { + return Some((0, 1 + command.len())); + } + } + None +} + +pub struct MessageEditorAddon {} + +impl MessageEditorAddon { + pub fn new() -> Self { + Self {} + } +} + +impl Addon for MessageEditorAddon { + fn to_any(&self) -> &dyn std::any::Any { + self + } + + fn to_any_mut(&mut self) -> Option<&mut dyn std::any::Any> { + Some(self) + } + + fn extend_key_context(&self, key_context: &mut KeyContext, cx: &App) { + let settings = agent_settings::AgentSettings::get_global(cx); + if settings.use_modifier_to_send { + key_context.add("use_modifier_to_send"); + } + } +} + +#[cfg(test)] +mod tests { + use std::{ops::Range, path::Path, sync::Arc}; + + use acp_thread::MentionUri; + use agent_client_protocol as acp; + use agent2::HistoryStore; + use assistant_context::ContextStore; + use editor::{AnchorRangeExt as _, Editor, EditorMode}; + use fs::FakeFs; + use futures::StreamExt as _; + use gpui::{ + AppContext, Entity, EventEmitter, FocusHandle, Focusable, TestAppContext, VisualTestContext, + }; + use lsp::{CompletionContext, CompletionTriggerKind}; + use project::{CompletionIntent, Project, ProjectPath}; + use serde_json::json; + use text::Point; + use ui::{App, Context, IntoElement, Render, SharedString, Window}; + use util::{path, uri}; + use workspace::{AppState, Item, Workspace}; + + use crate::acp::{ + message_editor::{Mention, MessageEditor}, + thread_view::tests::init_test, + }; + + #[gpui::test] + async fn test_at_mention_removal(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/project", json!({"file": ""})).await; + let project = Project::test(fs, [Path::new(path!("/project"))], cx).await; + + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + + let context_store = cx.new(|cx| ContextStore::fake(project.clone(), cx)); + let history_store = cx.new(|cx| HistoryStore::new(context_store, cx)); + + let message_editor = cx.update(|window, cx| { + cx.new(|cx| { + MessageEditor::new( + workspace.downgrade(), + project.clone(), + history_store.clone(), + None, + "Test", + false, + EditorMode::AutoHeight { + min_lines: 1, + max_lines: None, + }, + window, + cx, + ) + }) + }); + let editor = message_editor.update(cx, |message_editor, _| message_editor.editor.clone()); + + cx.run_until_parked(); + + let excerpt_id = editor.update(cx, |editor, cx| { + editor + .buffer() + .read(cx) + .excerpt_ids() + .into_iter() + .next() + .unwrap() + }); + let completions = editor.update_in(cx, |editor, window, cx| { + editor.set_text("Hello @file ", window, cx); + let buffer = editor.buffer().read(cx).as_singleton().unwrap(); + let completion_provider = editor.completion_provider().unwrap(); + completion_provider.completions( + 
excerpt_id, + &buffer, + text::Anchor::MAX, + CompletionContext { + trigger_kind: CompletionTriggerKind::TRIGGER_CHARACTER, + trigger_character: Some("@".into()), + }, + window, + cx, + ) + }); + let [_, completion]: [_; 2] = completions + .await + .unwrap() + .into_iter() + .flat_map(|response| response.completions) + .collect::>() + .try_into() + .unwrap(); + + editor.update_in(cx, |editor, window, cx| { + let snapshot = editor.buffer().read(cx).snapshot(cx); + let start = snapshot + .anchor_in_excerpt(excerpt_id, completion.replace_range.start) + .unwrap(); + let end = snapshot + .anchor_in_excerpt(excerpt_id, completion.replace_range.end) + .unwrap(); + editor.edit([(start..end, completion.new_text)], cx); + (completion.confirm.unwrap())(CompletionIntent::Complete, window, cx); + }); + + cx.run_until_parked(); + + // Backspace over the inserted crease (and the following space). + editor.update_in(cx, |editor, window, cx| { + editor.backspace(&Default::default(), window, cx); + editor.backspace(&Default::default(), window, cx); + }); + + let (content, _) = message_editor + .update_in(cx, |message_editor, window, cx| { + message_editor.contents(window, cx) + }) + .await + .unwrap(); + + // We don't send a resource link for the deleted crease. + pretty_assertions::assert_matches!(content.as_slice(), [acp::ContentBlock::Text { .. }]); + } + + struct MessageEditorItem(Entity); + + impl Item for MessageEditorItem { + type Event = (); + + fn include_in_nav_history() -> bool { + false + } + + fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString { + "Test".into() + } + } + + impl EventEmitter<()> for MessageEditorItem {} + + impl Focusable for MessageEditorItem { + fn focus_handle(&self, cx: &App) -> FocusHandle { + self.0.read(cx).focus_handle(cx) + } + } + + impl Render for MessageEditorItem { + fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { + self.0.clone().into_any_element() + } + } + + #[gpui::test] + async fn test_context_completion_provider(cx: &mut TestAppContext) { + init_test(cx); + + let app_state = cx.update(AppState::test); + + cx.update(|cx| { + language::init(cx); + editor::init(cx); + workspace::init(app_state.clone(), cx); + Project::init_settings(cx); + }); + + app_state + .fs + .as_fake() + .insert_tree( + path!("/dir"), + json!({ + "editor": "", + "a": { + "one.txt": "1", + "two.txt": "2", + "three.txt": "3", + "four.txt": "4" + }, + "b": { + "five.txt": "5", + "six.txt": "6", + "seven.txt": "7", + "eight.txt": "8", + } + }), + ) + .await; + + let project = Project::test(app_state.fs.clone(), [path!("/dir").as_ref()], cx).await; + let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); + let workspace = window.root(cx).unwrap(); + + let worktree = project.update(cx, |project, cx| { + let mut worktrees = project.worktrees(cx).collect::>(); + assert_eq!(worktrees.len(), 1); + worktrees.pop().unwrap() + }); + let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id()); + + let mut cx = VisualTestContext::from_window(*window, cx); + + let paths = vec![ + path!("a/one.txt"), + path!("a/two.txt"), + path!("a/three.txt"), + path!("a/four.txt"), + path!("b/five.txt"), + path!("b/six.txt"), + path!("b/seven.txt"), + path!("b/eight.txt"), + ]; + + let mut opened_editors = Vec::new(); + for path in paths { + let buffer = workspace + .update_in(&mut cx, |workspace, window, cx| { + workspace.open_path( + ProjectPath { + worktree_id, + path: Path::new(path).into(), + }, + None, + false, + 
window, + cx, + ) + }) + .await + .unwrap(); + opened_editors.push(buffer); + } + + let context_store = cx.new(|cx| ContextStore::fake(project.clone(), cx)); + let history_store = cx.new(|cx| HistoryStore::new(context_store, cx)); + + let (message_editor, editor) = workspace.update_in(&mut cx, |workspace, window, cx| { + let workspace_handle = cx.weak_entity(); + let message_editor = cx.new(|cx| { + MessageEditor::new( + workspace_handle, + project.clone(), + history_store.clone(), + None, + "Test", + false, + EditorMode::AutoHeight { + max_lines: None, + min_lines: 1, + }, + window, + cx, + ) + }); + workspace.active_pane().update(cx, |pane, cx| { + pane.add_item( + Box::new(cx.new(|_| MessageEditorItem(message_editor.clone()))), + true, + true, + None, + window, + cx, + ); + }); + message_editor.read(cx).focus_handle(cx).focus(window); + let editor = message_editor.read(cx).editor().clone(); + (message_editor, editor) + }); + + cx.simulate_input("Lorem @"); + + editor.update_in(&mut cx, |editor, window, cx| { + assert_eq!(editor.text(cx), "Lorem @"); + assert!(editor.has_visible_completions_menu()); + + // Only files since we have default capabilities + assert_eq!( + current_completion_labels(editor), + &[ + "eight.txt dir/b/", + "seven.txt dir/b/", + "six.txt dir/b/", + "five.txt dir/b/", + ] + ); + editor.set_text("", window, cx); + }); + + message_editor.update(&mut cx, |editor, _cx| { + // Enable all prompt capabilities + editor.set_prompt_capabilities(acp::PromptCapabilities { + image: true, + audio: true, + embedded_context: true, + }); + }); + + cx.simulate_input("Lorem "); + + editor.update(&mut cx, |editor, cx| { + assert_eq!(editor.text(cx), "Lorem "); + assert!(!editor.has_visible_completions_menu()); + }); + + cx.simulate_input("@"); + + editor.update(&mut cx, |editor, cx| { + assert_eq!(editor.text(cx), "Lorem @"); + assert!(editor.has_visible_completions_menu()); + assert_eq!( + current_completion_labels(editor), + &[ + "eight.txt dir/b/", + "seven.txt dir/b/", + "six.txt dir/b/", + "five.txt dir/b/", + "Files & Directories", + "Symbols", + "Threads", + "Fetch" + ] + ); + }); + + // Select and confirm "File" + editor.update_in(&mut cx, |editor, window, cx| { + assert!(editor.has_visible_completions_menu()); + editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx); + editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx); + editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx); + editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx); + editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx); + }); + + cx.run_until_parked(); + + editor.update(&mut cx, |editor, cx| { + assert_eq!(editor.text(cx), "Lorem @file "); + assert!(editor.has_visible_completions_menu()); + }); + + cx.simulate_input("one"); + + editor.update(&mut cx, |editor, cx| { + assert_eq!(editor.text(cx), "Lorem @file one"); + assert!(editor.has_visible_completions_menu()); + assert_eq!(current_completion_labels(editor), vec!["one.txt dir/a/"]); + }); + + editor.update_in(&mut cx, |editor, window, cx| { + assert!(editor.has_visible_completions_menu()); + editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx); + }); + + let url_one = uri!("file:///dir/a/one.txt"); + editor.update(&mut cx, |editor, cx| { + let text = editor.text(cx); + assert_eq!(text, format!("Lorem [@one.txt]({url_one}) ")); + assert!(!editor.has_visible_completions_menu()); + assert_eq!(fold_ranges(editor, cx).len(), 
1); + }); + + let all_prompt_capabilities = acp::PromptCapabilities { + image: true, + audio: true, + embedded_context: true, + }; + + let contents = message_editor + .update_in(&mut cx, |message_editor, window, cx| { + message_editor.mention_set().contents( + &project, + None, + &all_prompt_capabilities, + window, + cx, + ) + }) + .await + .unwrap() + .into_values() + .collect::>(); + + { + let [Mention::Text { content, uri, .. }] = contents.as_slice() else { + panic!("Unexpected mentions"); + }; + pretty_assertions::assert_eq!(content, "1"); + pretty_assertions::assert_eq!(uri, &url_one.parse::().unwrap()); + } + + let contents = message_editor + .update_in(&mut cx, |message_editor, window, cx| { + message_editor.mention_set().contents( + &project, + None, + &acp::PromptCapabilities::default(), + window, + cx, + ) + }) + .await + .unwrap() + .into_values() + .collect::>(); + + { + let [Mention::UriOnly(uri)] = contents.as_slice() else { + panic!("Unexpected mentions"); + }; + pretty_assertions::assert_eq!(uri, &url_one.parse::().unwrap()); + } + + cx.simulate_input(" "); + + editor.update(&mut cx, |editor, cx| { + let text = editor.text(cx); + assert_eq!(text, format!("Lorem [@one.txt]({url_one}) ")); + assert!(!editor.has_visible_completions_menu()); + assert_eq!(fold_ranges(editor, cx).len(), 1); + }); + + cx.simulate_input("Ipsum "); + + editor.update(&mut cx, |editor, cx| { + let text = editor.text(cx); + assert_eq!(text, format!("Lorem [@one.txt]({url_one}) Ipsum "),); + assert!(!editor.has_visible_completions_menu()); + assert_eq!(fold_ranges(editor, cx).len(), 1); + }); + + cx.simulate_input("@file "); + + editor.update(&mut cx, |editor, cx| { + let text = editor.text(cx); + assert_eq!(text, format!("Lorem [@one.txt]({url_one}) Ipsum @file "),); + assert!(editor.has_visible_completions_menu()); + assert_eq!(fold_ranges(editor, cx).len(), 1); + }); + + editor.update_in(&mut cx, |editor, window, cx| { + editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx); + }); + + cx.run_until_parked(); + + let contents = message_editor + .update_in(&mut cx, |message_editor, window, cx| { + message_editor.mention_set().contents( + &project, + None, + &all_prompt_capabilities, + window, + cx, + ) + }) + .await + .unwrap() + .into_values() + .collect::>(); + + let url_eight = uri!("file:///dir/b/eight.txt"); + + { + let [_, Mention::Text { content, uri, .. 
}] = contents.as_slice() else { + panic!("Unexpected mentions"); + }; + pretty_assertions::assert_eq!(content, "8"); + pretty_assertions::assert_eq!(uri, &url_eight.parse::().unwrap()); + } + + editor.update(&mut cx, |editor, cx| { + assert_eq!( + editor.text(cx), + format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) ") + ); + assert!(!editor.has_visible_completions_menu()); + assert_eq!(fold_ranges(editor, cx).len(), 2); + }); + + let plain_text_language = Arc::new(language::Language::new( + language::LanguageConfig { + name: "Plain Text".into(), + matcher: language::LanguageMatcher { + path_suffixes: vec!["txt".to_string()], + ..Default::default() + }, + ..Default::default() + }, + None, + )); + + // Register the language and fake LSP + let language_registry = project.read_with(&cx, |project, _| project.languages().clone()); + language_registry.add(plain_text_language); + + let mut fake_language_servers = language_registry.register_fake_lsp( + "Plain Text", + language::FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + workspace_symbol_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + ..Default::default() + }, + ); + + // Open the buffer to trigger LSP initialization + let buffer = project + .update(&mut cx, |project, cx| { + project.open_local_buffer(path!("/dir/a/one.txt"), cx) + }) + .await + .unwrap(); + + // Register the buffer with language servers + let _handle = project.update(&mut cx, |project, cx| { + project.register_buffer_with_language_servers(&buffer, cx) + }); + + cx.run_until_parked(); + + let fake_language_server = fake_language_servers.next().await.unwrap(); + fake_language_server.set_request_handler::( + move |_, _| async move { + Ok(Some(lsp::WorkspaceSymbolResponse::Flat(vec![ + #[allow(deprecated)] + lsp::SymbolInformation { + name: "MySymbol".into(), + location: lsp::Location { + uri: lsp::Url::from_file_path(path!("/dir/a/one.txt")).unwrap(), + range: lsp::Range::new( + lsp::Position::new(0, 0), + lsp::Position::new(0, 1), + ), + }, + kind: lsp::SymbolKind::CONSTANT, + tags: None, + container_name: None, + deprecated: None, + }, + ]))) + }, + ); + + cx.simulate_input("@symbol "); + + editor.update(&mut cx, |editor, cx| { + assert_eq!( + editor.text(cx), + format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) @symbol ") + ); + assert!(editor.has_visible_completions_menu()); + assert_eq!(current_completion_labels(editor), &["MySymbol"]); + }); + + editor.update_in(&mut cx, |editor, window, cx| { + editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx); + }); + + let contents = message_editor + .update_in(&mut cx, |message_editor, window, cx| { + message_editor.mention_set().contents( + &project, + None, + &all_prompt_capabilities, + window, + cx, + ) + }) + .await + .unwrap() + .into_values() + .collect::>(); + + { + let [_, _, Mention::Text { content, uri, .. 
}] = contents.as_slice() else { + panic!("Unexpected mentions"); + }; + pretty_assertions::assert_eq!(content, "1"); + pretty_assertions::assert_eq!( + uri, + &format!("{url_one}?symbol=MySymbol#L1:1") + .parse::() + .unwrap() + ); + } + + cx.run_until_parked(); + + editor.read_with(&cx, |editor, cx| { + assert_eq!( + editor.text(cx), + format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({url_one}?symbol=MySymbol#L1:1) ") + ); + }); + } + + fn fold_ranges(editor: &Editor, cx: &mut App) -> Vec> { + let snapshot = editor.buffer().read(cx).snapshot(cx); + editor.display_map.update(cx, |display_map, cx| { + display_map + .snapshot(cx) + .folds_in_range(0..snapshot.len()) + .map(|fold| fold.range.to_point(&snapshot)) + .collect() + }) + } + + fn current_completion_labels(editor: &Editor) -> Vec { + let completions = editor.current_completions().expect("Missing completions"); + completions + .into_iter() + .map(|completion| completion.label.text) + .collect::>() + } +} diff --git a/crates/agent_ui/src/acp/message_history.rs b/crates/agent_ui/src/acp/message_history.rs deleted file mode 100644 index c6106c7578230e60576cdeb48318012b52e76e46..0000000000000000000000000000000000000000 --- a/crates/agent_ui/src/acp/message_history.rs +++ /dev/null @@ -1,92 +0,0 @@ -pub struct MessageHistory { - items: Vec, - current: Option, -} - -impl Default for MessageHistory { - fn default() -> Self { - MessageHistory { - items: Vec::new(), - current: None, - } - } -} - -impl MessageHistory { - pub fn push(&mut self, message: T) { - self.current.take(); - self.items.push(message); - } - - pub fn reset_position(&mut self) { - self.current.take(); - } - - pub fn prev(&mut self) -> Option<&T> { - if self.items.is_empty() { - return None; - } - - let new_ix = self - .current - .get_or_insert(self.items.len()) - .saturating_sub(1); - - self.current = Some(new_ix); - self.items.get(new_ix) - } - - pub fn next(&mut self) -> Option<&T> { - let current = self.current.as_mut()?; - *current += 1; - - self.items.get(*current).or_else(|| { - self.current.take(); - None - }) - } - - #[cfg(test)] - pub fn items(&self) -> &[T] { - &self.items - } -} -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_prev_next() { - let mut history = MessageHistory::default(); - - // Test empty history - assert_eq!(history.prev(), None); - assert_eq!(history.next(), None); - - // Add some messages - history.push("first"); - history.push("second"); - history.push("third"); - - // Test prev navigation - assert_eq!(history.prev(), Some(&"third")); - assert_eq!(history.prev(), Some(&"second")); - assert_eq!(history.prev(), Some(&"first")); - assert_eq!(history.prev(), Some(&"first")); - - assert_eq!(history.next(), Some(&"second")); - - // Test mixed navigation - history.push("fourth"); - assert_eq!(history.prev(), Some(&"fourth")); - assert_eq!(history.prev(), Some(&"third")); - assert_eq!(history.next(), Some(&"fourth")); - assert_eq!(history.next(), None); - - // Test that push resets navigation - history.prev(); - history.prev(); - history.push("fifth"); - assert_eq!(history.prev(), Some(&"fifth")); - } -} diff --git a/crates/agent_ui/src/acp/model_selector.rs b/crates/agent_ui/src/acp/model_selector.rs new file mode 100644 index 0000000000000000000000000000000000000000..77c88c461d6e6fadcefd8eb7319bbbe2ff05fef4 --- /dev/null +++ b/crates/agent_ui/src/acp/model_selector.rs @@ -0,0 +1,472 @@ +use std::{cmp::Reverse, rc::Rc, sync::Arc}; + +use acp_thread::{AgentModelInfo, AgentModelList, AgentModelSelector}; 
+use agent_client_protocol as acp;
+use anyhow::Result;
+use collections::IndexMap;
+use futures::FutureExt;
+use fuzzy::{StringMatchCandidate, match_strings};
+use gpui::{Action, AsyncWindowContext, BackgroundExecutor, DismissEvent, Task, WeakEntity};
+use ordered_float::OrderedFloat;
+use picker::{Picker, PickerDelegate};
+use ui::{
+    AnyElement, App, Context, IntoElement, ListItem, ListItemSpacing, SharedString, Window,
+    prelude::*, rems,
+};
+use util::ResultExt;
+
+pub type AcpModelSelector = Picker<AcpModelPickerDelegate>;
+
+pub fn acp_model_selector(
+    session_id: acp::SessionId,
+    selector: Rc<dyn AgentModelSelector>,
+    window: &mut Window,
+    cx: &mut Context<AcpModelSelector>,
+) -> AcpModelSelector {
+    let delegate = AcpModelPickerDelegate::new(session_id, selector, window, cx);
+    Picker::list(delegate, window, cx)
+        .show_scrollbar(true)
+        .width(rems(20.))
+        .max_height(Some(rems(20.).into()))
+}
+
+enum AcpModelPickerEntry {
+    Separator(SharedString),
+    Model(AgentModelInfo),
+}
+
+pub struct AcpModelPickerDelegate {
+    session_id: acp::SessionId,
+    selector: Rc<dyn AgentModelSelector>,
+    filtered_entries: Vec<AcpModelPickerEntry>,
+    models: Option<AgentModelList>,
+    selected_index: usize,
+    selected_model: Option<AgentModelInfo>,
+    _refresh_models_task: Task<()>,
+}
+
+impl AcpModelPickerDelegate {
+    fn new(
+        session_id: acp::SessionId,
+        selector: Rc<dyn AgentModelSelector>,
+        window: &mut Window,
+        cx: &mut Context<AcpModelSelector>,
+    ) -> Self {
+        let mut rx = selector.watch(cx);
+        let refresh_models_task = cx.spawn_in(window, {
+            let session_id = session_id.clone();
+            async move |this, cx| {
+                async fn refresh(
+                    this: &WeakEntity<Picker<AcpModelPickerDelegate>>,
+                    session_id: &acp::SessionId,
+                    cx: &mut AsyncWindowContext,
+                ) -> Result<()> {
+                    let (models_task, selected_model_task) = this.update(cx, |this, cx| {
+                        (
+                            this.delegate.selector.list_models(cx),
+                            this.delegate.selector.selected_model(session_id, cx),
+                        )
+                    })?;
+
+                    let (models, selected_model) = futures::join!(models_task, selected_model_task);
+
+                    this.update_in(cx, |this, window, cx| {
+                        this.delegate.models = models.ok();
+                        this.delegate.selected_model = selected_model.ok();
+                        this.delegate.update_matches(this.query(cx), window, cx)
+                    })?
+                    .await;
+
+                    Ok(())
+                }
+
+                refresh(&this, &session_id, cx).await.log_err();
+                while let Ok(()) = rx.recv().await {
+                    refresh(&this, &session_id, cx).await.log_err();
+                }
+            }
+        });
+
+        Self {
+            session_id,
+            selector,
+            filtered_entries: Vec::new(),
+            models: None,
+            selected_model: None,
+            selected_index: 0,
+            _refresh_models_task: refresh_models_task,
+        }
+    }
+
+    pub fn active_model(&self) -> Option<&AgentModelInfo> {
+        self.selected_model.as_ref()
+    }
+}
+
+impl PickerDelegate for AcpModelPickerDelegate {
+    type ListItem = AnyElement;
+
+    fn match_count(&self) -> usize {
+        self.filtered_entries.len()
+    }
+
+    fn selected_index(&self) -> usize {
+        self.selected_index
+    }
+
+    fn set_selected_index(&mut self, ix: usize, _: &mut Window, cx: &mut Context<Picker<Self>>) {
+        self.selected_index = ix.min(self.filtered_entries.len().saturating_sub(1));
+        cx.notify();
+    }
+
+    fn can_select(
+        &mut self,
+        ix: usize,
+        _window: &mut Window,
+        _cx: &mut Context<Picker<Self>>,
+    ) -> bool {
+        match self.filtered_entries.get(ix) {
+            Some(AcpModelPickerEntry::Model(_)) => true,
+            Some(AcpModelPickerEntry::Separator(_)) | None => false,
+        }
+    }
+
+    fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
+        "Select a model…".into()
+    }
+
+    fn update_matches(
+        &mut self,
+        query: String,
+        window: &mut Window,
+        cx: &mut Context<Picker<Self>>,
+    ) -> Task<()> {
+        cx.spawn_in(window, async move |this, cx| {
+            let filtered_models = match this
+                .read_with(cx, |this, cx| {
+                    this.delegate.models.clone().map(move |models| {
+                        fuzzy_search(models, query, cx.background_executor().clone())
+                    })
+                })
+                .ok()
+                .flatten()
+            {
+                Some(task) => task.await,
+                None => AgentModelList::Flat(vec![]),
+            };
+
+            this.update_in(cx, |this, window, cx| {
+                this.delegate.filtered_entries =
+                    info_list_to_picker_entries(filtered_models).collect();
+                // Finds the currently selected model in the list
+                let new_index = this
+                    .delegate
+                    .selected_model
+                    .as_ref()
+                    .and_then(|selected| {
+                        this.delegate.filtered_entries.iter().position(|entry| {
+                            if let AcpModelPickerEntry::Model(model_info) = entry {
+                                model_info.id == selected.id
+                            } else {
+                                false
+                            }
+                        })
+                    })
+                    .unwrap_or(0);
+                this.set_selected_index(new_index, Some(picker::Direction::Down), true, window, cx);
+                cx.notify();
+            })
+            .ok();
+        })
+    }
+
+    fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
+        if let Some(AcpModelPickerEntry::Model(model_info)) =
+            self.filtered_entries.get(self.selected_index)
+        {
+            self.selector
+                .select_model(self.session_id.clone(), model_info.id.clone(), cx)
+                .detach_and_log_err(cx);
+            self.selected_model = Some(model_info.clone());
+            let current_index = self.selected_index;
+            self.set_selected_index(current_index, window, cx);
+
+            cx.emit(DismissEvent);
+        }
+    }
+
+    fn dismissed(&mut self, _: &mut Window, cx: &mut Context<Picker<Self>>) {
+        cx.emit(DismissEvent);
+    }
+
+    fn render_match(
+        &self,
+        ix: usize,
+        selected: bool,
+        _: &mut Window,
+        cx: &mut Context<Picker<Self>>,
+    ) -> Option<Self::ListItem> {
+        match self.filtered_entries.get(ix)?
{ + AcpModelPickerEntry::Separator(title) => Some( + div() + .px_2() + .pb_1() + .when(ix > 1, |this| { + this.mt_1() + .pt_2() + .border_t_1() + .border_color(cx.theme().colors().border_variant) + }) + .child( + Label::new(title) + .size(LabelSize::XSmall) + .color(Color::Muted), + ) + .into_any_element(), + ), + AcpModelPickerEntry::Model(model_info) => { + let is_selected = Some(model_info) == self.selected_model.as_ref(); + + let model_icon_color = if is_selected { + Color::Accent + } else { + Color::Muted + }; + + Some( + ListItem::new(ix) + .inset(true) + .spacing(ListItemSpacing::Sparse) + .toggle_state(selected) + .start_slot::(model_info.icon.map(|icon| { + Icon::new(icon) + .color(model_icon_color) + .size(IconSize::Small) + })) + .child( + h_flex() + .w_full() + .pl_0p5() + .gap_1p5() + .w(px(240.)) + .child(Label::new(model_info.name.clone()).truncate()), + ) + .end_slot(div().pr_3().when(is_selected, |this| { + this.child( + Icon::new(IconName::Check) + .color(Color::Accent) + .size(IconSize::Small), + ) + })) + .into_any_element(), + ) + } + } + } + + fn render_footer( + &self, + _: &mut Window, + cx: &mut Context>, + ) -> Option { + Some( + h_flex() + .w_full() + .border_t_1() + .border_color(cx.theme().colors().border_variant) + .p_1() + .gap_4() + .justify_between() + .child( + Button::new("configure", "Configure") + .icon(IconName::Settings) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .icon_position(IconPosition::Start) + .on_click(|_, window, cx| { + window.dispatch_action( + zed_actions::agent::OpenSettings.boxed_clone(), + cx, + ); + }), + ) + .into_any(), + ) + } +} + +fn info_list_to_picker_entries( + model_list: AgentModelList, +) -> impl Iterator { + match model_list { + AgentModelList::Flat(list) => { + itertools::Either::Left(list.into_iter().map(AcpModelPickerEntry::Model)) + } + AgentModelList::Grouped(index_map) => { + itertools::Either::Right(index_map.into_iter().flat_map(|(group_name, models)| { + std::iter::once(AcpModelPickerEntry::Separator(group_name.0)) + .chain(models.into_iter().map(AcpModelPickerEntry::Model)) + })) + } + } +} + +async fn fuzzy_search( + model_list: AgentModelList, + query: String, + executor: BackgroundExecutor, +) -> AgentModelList { + async fn fuzzy_search_list( + model_list: Vec, + query: &str, + executor: BackgroundExecutor, + ) -> Vec { + let candidates = model_list + .iter() + .enumerate() + .map(|(ix, model)| { + StringMatchCandidate::new(ix, &format!("{}/{}", model.id, model.name)) + }) + .collect::>(); + let mut matches = match_strings( + &candidates, + query, + false, + true, + 100, + &Default::default(), + executor, + ) + .await; + + matches.sort_unstable_by_key(|mat| { + let candidate = &candidates[mat.candidate_id]; + (Reverse(OrderedFloat(mat.score)), candidate.id) + }); + + matches + .into_iter() + .map(|mat| model_list[mat.candidate_id].clone()) + .collect() + } + + match model_list { + AgentModelList::Flat(model_list) => { + AgentModelList::Flat(fuzzy_search_list(model_list, &query, executor).await) + } + AgentModelList::Grouped(index_map) => { + let groups = + futures::future::join_all(index_map.into_iter().map(|(group_name, models)| { + fuzzy_search_list(models, &query, executor.clone()) + .map(|results| (group_name, results)) + })) + .await; + AgentModelList::Grouped(IndexMap::from_iter( + groups + .into_iter() + .filter(|(_, results)| !results.is_empty()), + )) + } + } +} + +#[cfg(test)] +mod tests { + use gpui::TestAppContext; + + use super::*; + + fn create_model_list(grouped_models: 
Vec<(&str, Vec<&str>)>) -> AgentModelList { + AgentModelList::Grouped(IndexMap::from_iter(grouped_models.into_iter().map( + |(group, models)| { + ( + acp_thread::AgentModelGroupName(group.to_string().into()), + models + .into_iter() + .map(|model| acp_thread::AgentModelInfo { + id: acp_thread::AgentModelId(model.to_string().into()), + name: model.to_string().into(), + icon: None, + }) + .collect::>(), + ) + }, + ))) + } + + fn assert_models_eq(result: AgentModelList, expected: Vec<(&str, Vec<&str>)>) { + let AgentModelList::Grouped(groups) = result else { + panic!("Expected LanguageModelInfoList::Grouped, got {:?}", result); + }; + + assert_eq!( + groups.len(), + expected.len(), + "Number of groups doesn't match" + ); + + for (i, (expected_group, expected_models)) in expected.iter().enumerate() { + let (actual_group, actual_models) = groups.get_index(i).unwrap(); + assert_eq!( + actual_group.0.as_ref(), + *expected_group, + "Group at position {} doesn't match expected group", + i + ); + assert_eq!( + actual_models.len(), + expected_models.len(), + "Number of models in group {} doesn't match", + expected_group + ); + + for (j, expected_model_name) in expected_models.iter().enumerate() { + assert_eq!( + actual_models[j].name, *expected_model_name, + "Model at position {} in group {} doesn't match expected model", + j, expected_group + ); + } + } + } + + #[gpui::test] + async fn test_fuzzy_match(cx: &mut TestAppContext) { + let models = create_model_list(vec![ + ( + "zed", + vec![ + "Claude 3.7 Sonnet", + "Claude 3.7 Sonnet Thinking", + "gpt-4.1", + "gpt-4.1-nano", + ], + ), + ("openai", vec!["gpt-3.5-turbo", "gpt-4.1", "gpt-4.1-nano"]), + ("ollama", vec!["mistral", "deepseek"]), + ]); + + // Results should preserve models order whenever possible. + // In the case below, `zed/gpt-4.1` and `openai/gpt-4.1` have identical + // similarity scores, but `zed/gpt-4.1` was higher in the models list, + // so it should appear first in the results. 
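+        // (Under the hood, `fuzzy_search_list` sorts by `(Reverse(score), candidate.id)`,
+        // so ties on score fall back to the candidate's original index.)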
+ let results = fuzzy_search(models.clone(), "41".into(), cx.executor()).await; + assert_models_eq( + results, + vec![ + ("zed", vec!["gpt-4.1", "gpt-4.1-nano"]), + ("openai", vec!["gpt-4.1", "gpt-4.1-nano"]), + ], + ); + + // Fuzzy search + let results = fuzzy_search(models.clone(), "4n".into(), cx.executor()).await; + assert_models_eq( + results, + vec![ + ("zed", vec!["gpt-4.1-nano"]), + ("openai", vec!["gpt-4.1-nano"]), + ], + ); + } +} diff --git a/crates/agent_ui/src/acp/model_selector_popover.rs b/crates/agent_ui/src/acp/model_selector_popover.rs new file mode 100644 index 0000000000000000000000000000000000000000..e52101113a61c7379be54e25f1784ac16b660200 --- /dev/null +++ b/crates/agent_ui/src/acp/model_selector_popover.rs @@ -0,0 +1,85 @@ +use std::rc::Rc; + +use acp_thread::AgentModelSelector; +use agent_client_protocol as acp; +use gpui::{Entity, FocusHandle}; +use picker::popover_menu::PickerPopoverMenu; +use ui::{ + ButtonLike, Context, IntoElement, PopoverMenuHandle, SharedString, Tooltip, Window, prelude::*, +}; +use zed_actions::agent::ToggleModelSelector; + +use crate::acp::{AcpModelSelector, model_selector::acp_model_selector}; + +pub struct AcpModelSelectorPopover { + selector: Entity, + menu_handle: PopoverMenuHandle, + focus_handle: FocusHandle, +} + +impl AcpModelSelectorPopover { + pub(crate) fn new( + session_id: acp::SessionId, + selector: Rc, + menu_handle: PopoverMenuHandle, + focus_handle: FocusHandle, + window: &mut Window, + cx: &mut Context, + ) -> Self { + Self { + selector: cx.new(move |cx| acp_model_selector(session_id, selector, window, cx)), + menu_handle, + focus_handle, + } + } + + pub fn toggle(&self, window: &mut Window, cx: &mut Context) { + self.menu_handle.toggle(window, cx); + } +} + +impl Render for AcpModelSelectorPopover { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let model = self.selector.read(cx).delegate.active_model(); + let model_name = model + .as_ref() + .map(|model| model.name.clone()) + .unwrap_or_else(|| SharedString::from("Select a Model")); + + let model_icon = model.as_ref().and_then(|model| model.icon); + + let focus_handle = self.focus_handle.clone(); + + PickerPopoverMenu::new( + self.selector.clone(), + ButtonLike::new("active-model") + .when_some(model_icon, |this, icon| { + this.child(Icon::new(icon).color(Color::Muted).size(IconSize::XSmall)) + }) + .child( + Label::new(model_name) + .color(Color::Muted) + .size(LabelSize::Small) + .ml_0p5(), + ) + .child( + Icon::new(IconName::ChevronDown) + .color(Color::Muted) + .size(IconSize::XSmall), + ), + move |window, cx| { + Tooltip::for_action_in( + "Change Model", + &ToggleModelSelector, + &focus_handle, + window, + cx, + ) + }, + gpui::Corner::BottomRight, + cx, + ) + .with_handle(self.menu_handle.clone()) + .render(window, cx) + } +} diff --git a/crates/agent_ui/src/acp/thread_history.rs b/crates/agent_ui/src/acp/thread_history.rs new file mode 100644 index 0000000000000000000000000000000000000000..d76969378ce2dd30e891618079d7ce60664e2617 --- /dev/null +++ b/crates/agent_ui/src/acp/thread_history.rs @@ -0,0 +1,902 @@ +use crate::acp::AcpThreadView; +use crate::{AgentPanel, RemoveSelectedThread}; +use agent2::{HistoryEntry, HistoryStore}; +use chrono::{Datelike as _, Local, NaiveDate, TimeDelta}; +use editor::{Editor, EditorEvent}; +use fuzzy::{StringMatch, StringMatchCandidate}; +use gpui::{ + App, Empty, Entity, EventEmitter, FocusHandle, Focusable, ScrollStrategy, Stateful, Task, + UniformListScrollHandle, WeakEntity, Window, 
uniform_list,
+};
+use std::{fmt::Display, ops::Range, sync::Arc};
+use time::{OffsetDateTime, UtcOffset};
+use ui::{
+    HighlightedLabel, IconButtonShape, ListItem, ListItemSpacing, Scrollbar, ScrollbarState,
+    Tooltip, prelude::*,
+};
+use util::ResultExt;
+
+pub struct AcpThreadHistory {
+    pub(crate) history_store: Entity<HistoryStore>,
+    scroll_handle: UniformListScrollHandle,
+    selected_index: usize,
+    hovered_index: Option<usize>,
+    search_editor: Entity<Editor>,
+    all_entries: Arc<Vec<HistoryEntry>>,
+    // When the search is empty, we display date separators between history entries
+    // This vector contains an enum of either a separator or an actual entry
+    separated_items: Vec<ListItemType>,
+    // Maps entry indexes to list item indexes
+    separated_item_indexes: Vec<u32>,
+    _separated_items_task: Option<Task<()>>,
+    search_state: SearchState,
+    scrollbar_visibility: bool,
+    scrollbar_state: ScrollbarState,
+    local_timezone: UtcOffset,
+    _subscriptions: Vec<gpui::Subscription>,
+}
+
+enum SearchState {
+    Empty,
+    Searching {
+        query: SharedString,
+        _task: Task<()>,
+    },
+    Searched {
+        query: SharedString,
+        matches: Vec<StringMatch>,
+    },
+}
+
+enum ListItemType {
+    BucketSeparator(TimeBucket),
+    Entry {
+        index: usize,
+        format: EntryTimeFormat,
+    },
+}
+
+pub enum ThreadHistoryEvent {
+    Open(HistoryEntry),
+}
+
+impl EventEmitter<ThreadHistoryEvent> for AcpThreadHistory {}
+
+impl AcpThreadHistory {
+    pub(crate) fn new(
+        history_store: Entity<HistoryStore>,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) -> Self {
+        let search_editor = cx.new(|cx| {
+            let mut editor = Editor::single_line(window, cx);
+            editor.set_placeholder_text("Search threads...", cx);
+            editor
+        });
+
+        let search_editor_subscription =
+            cx.subscribe(&search_editor, |this, search_editor, event, cx| {
+                if let EditorEvent::BufferEdited = event {
+                    let query = search_editor.read(cx).text(cx);
+                    this.search(query.into(), cx);
+                }
+            });
+
+        let history_store_subscription = cx.observe(&history_store, |this, _, cx| {
+            this.update_all_entries(cx);
+        });
+
+        let scroll_handle = UniformListScrollHandle::default();
+        let scrollbar_state = ScrollbarState::new(scroll_handle.clone());
+
+        let mut this = Self {
+            history_store,
+            scroll_handle,
+            selected_index: 0,
+            hovered_index: None,
+            search_state: SearchState::Empty,
+            all_entries: Default::default(),
+            separated_items: Default::default(),
+            separated_item_indexes: Default::default(),
+            search_editor,
+            scrollbar_visibility: true,
+            scrollbar_state,
+            local_timezone: UtcOffset::from_whole_seconds(
+                chrono::Local::now().offset().local_minus_utc(),
+            )
+            .unwrap(),
+            _subscriptions: vec![search_editor_subscription, history_store_subscription],
+            _separated_items_task: None,
+        };
+        this.update_all_entries(cx);
+        this
+    }
+
+    fn update_all_entries(&mut self, cx: &mut Context<Self>) {
+        let new_entries: Arc<Vec<HistoryEntry>> = self
+            .history_store
+            .update(cx, |store, cx| store.entries(cx))
+            .into();
+
+        self._separated_items_task.take();
+
+        let mut items = Vec::with_capacity(new_entries.len() + 1);
+        let mut indexes = Vec::with_capacity(new_entries.len() + 1);
+
+        let bg_task = cx.background_spawn(async move {
+            let mut bucket = None;
+            let today = Local::now().naive_local().date();
+
+            for (index, entry) in new_entries.iter().enumerate() {
+                let entry_date = entry
+                    .updated_at()
+                    .with_timezone(&Local)
+                    .naive_local()
+                    .date();
+                let entry_bucket = TimeBucket::from_dates(today, entry_date);
+
+                if Some(entry_bucket) != bucket {
+                    bucket = Some(entry_bucket);
+                    items.push(ListItemType::BucketSeparator(entry_bucket));
+                }
+
+                indexes.push(items.len() as u32);
+                items.push(ListItemType::Entry {
+                    index,
+                    format:
entry_bucket.into(), + }); + } + (new_entries, items, indexes) + }); + + let task = cx.spawn(async move |this, cx| { + let (new_entries, items, indexes) = bg_task.await; + this.update(cx, |this, cx| { + let previously_selected_entry = + this.all_entries.get(this.selected_index).map(|e| e.id()); + + this.all_entries = new_entries; + this.separated_items = items; + this.separated_item_indexes = indexes; + + match &this.search_state { + SearchState::Empty => { + if this.selected_index >= this.all_entries.len() { + this.set_selected_entry_index( + this.all_entries.len().saturating_sub(1), + cx, + ); + } else if let Some(prev_id) = previously_selected_entry + && let Some(new_ix) = this + .all_entries + .iter() + .position(|probe| probe.id() == prev_id) + { + this.set_selected_entry_index(new_ix, cx); + } + } + SearchState::Searching { query, .. } | SearchState::Searched { query, .. } => { + this.search(query.clone(), cx); + } + } + + cx.notify(); + }) + .log_err(); + }); + self._separated_items_task = Some(task); + } + + fn search(&mut self, query: SharedString, cx: &mut Context) { + if query.is_empty() { + self.search_state = SearchState::Empty; + cx.notify(); + return; + } + + let all_entries = self.all_entries.clone(); + + let fuzzy_search_task = cx.background_spawn({ + let query = query.clone(); + let executor = cx.background_executor().clone(); + async move { + let mut candidates = Vec::with_capacity(all_entries.len()); + + for (idx, entry) in all_entries.iter().enumerate() { + candidates.push(StringMatchCandidate::new(idx, entry.title())); + } + + const MAX_MATCHES: usize = 100; + + fuzzy::match_strings( + &candidates, + &query, + false, + true, + MAX_MATCHES, + &Default::default(), + executor, + ) + .await + } + }); + + let task = cx.spawn({ + let query = query.clone(); + async move |this, cx| { + let matches = fuzzy_search_task.await; + + this.update(cx, |this, cx| { + let SearchState::Searching { + query: current_query, + _task, + } = &this.search_state + else { + return; + }; + + if &query == current_query { + this.search_state = SearchState::Searched { + query: query.clone(), + matches, + }; + + this.set_selected_entry_index(0, cx); + cx.notify(); + }; + }) + .log_err(); + } + }); + + self.search_state = SearchState::Searching { query, _task: task }; + cx.notify(); + } + + fn matched_count(&self) -> usize { + match &self.search_state { + SearchState::Empty => self.all_entries.len(), + SearchState::Searching { .. } => 0, + SearchState::Searched { matches, .. } => matches.len(), + } + } + + fn list_item_count(&self) -> usize { + match &self.search_state { + SearchState::Empty => self.separated_items.len(), + SearchState::Searching { .. } => 0, + SearchState::Searched { matches, .. } => matches.len(), + } + } + + fn search_produced_no_matches(&self) -> bool { + match &self.search_state { + SearchState::Empty => false, + SearchState::Searching { .. } => false, + SearchState::Searched { matches, .. } => matches.is_empty(), + } + } + + fn get_match(&self, ix: usize) -> Option<&HistoryEntry> { + match &self.search_state { + SearchState::Empty => self.all_entries.get(ix), + SearchState::Searching { .. } => None, + SearchState::Searched { matches, .. 
} => matches + .get(ix) + .and_then(|m| self.all_entries.get(m.candidate_id)), + } + } + + pub fn select_previous( + &mut self, + _: &menu::SelectPrevious, + _window: &mut Window, + cx: &mut Context, + ) { + let count = self.matched_count(); + if count > 0 { + if self.selected_index == 0 { + self.set_selected_entry_index(count - 1, cx); + } else { + self.set_selected_entry_index(self.selected_index - 1, cx); + } + } + } + + pub fn select_next( + &mut self, + _: &menu::SelectNext, + _window: &mut Window, + cx: &mut Context, + ) { + let count = self.matched_count(); + if count > 0 { + if self.selected_index == count - 1 { + self.set_selected_entry_index(0, cx); + } else { + self.set_selected_entry_index(self.selected_index + 1, cx); + } + } + } + + fn select_first( + &mut self, + _: &menu::SelectFirst, + _window: &mut Window, + cx: &mut Context, + ) { + let count = self.matched_count(); + if count > 0 { + self.set_selected_entry_index(0, cx); + } + } + + fn select_last(&mut self, _: &menu::SelectLast, _window: &mut Window, cx: &mut Context) { + let count = self.matched_count(); + if count > 0 { + self.set_selected_entry_index(count - 1, cx); + } + } + + fn set_selected_entry_index(&mut self, entry_index: usize, cx: &mut Context) { + self.selected_index = entry_index; + + let scroll_ix = match self.search_state { + SearchState::Empty | SearchState::Searching { .. } => self + .separated_item_indexes + .get(entry_index) + .map(|ix| *ix as usize) + .unwrap_or(entry_index + 1), + SearchState::Searched { .. } => entry_index, + }; + + self.scroll_handle + .scroll_to_item(scroll_ix, ScrollStrategy::Top); + + cx.notify(); + } + + fn render_scrollbar(&self, cx: &mut Context) -> Option> { + if !(self.scrollbar_visibility || self.scrollbar_state.is_dragging()) { + return None; + } + + Some( + div() + .occlude() + .id("thread-history-scroll") + .h_full() + .bg(cx.theme().colors().panel_background.opacity(0.8)) + .border_l_1() + .border_color(cx.theme().colors().border_variant) + .absolute() + .right_1() + .top_0() + .bottom_0() + .w_4() + .pl_1() + .cursor_default() + .on_mouse_move(cx.listener(|_, _, _window, cx| { + cx.notify(); + cx.stop_propagation() + })) + .on_hover(|_, _window, cx| { + cx.stop_propagation(); + }) + .on_any_mouse_down(|_, _window, cx| { + cx.stop_propagation(); + }) + .on_scroll_wheel(cx.listener(|_, _, _window, cx| { + cx.notify(); + })) + .children(Scrollbar::vertical(self.scrollbar_state.clone())), + ) + } + + fn confirm(&mut self, _: &menu::Confirm, _window: &mut Window, cx: &mut Context) { + self.confirm_entry(self.selected_index, cx); + } + + fn confirm_entry(&mut self, ix: usize, cx: &mut Context) { + let Some(entry) = self.get_match(ix) else { + return; + }; + cx.emit(ThreadHistoryEvent::Open(entry.clone())); + } + + fn remove_selected_thread( + &mut self, + _: &RemoveSelectedThread, + _window: &mut Window, + cx: &mut Context, + ) { + self.remove_thread(self.selected_index, cx) + } + + fn remove_thread(&mut self, ix: usize, cx: &mut Context) { + let Some(entry) = self.get_match(ix) else { + return; + }; + + let task = match entry { + HistoryEntry::AcpThread(thread) => self + .history_store + .update(cx, |this, cx| this.delete_thread(thread.id.clone(), cx)), + HistoryEntry::TextThread(context) => self.history_store.update(cx, |this, cx| { + this.delete_text_thread(context.path.clone(), cx) + }), + }; + task.detach_and_log_err(cx); + } + + fn list_items( + &mut self, + range: Range, + _window: &mut Window, + cx: &mut Context, + ) -> Vec { + match &self.search_state { + 
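+            // With no active query we render the date-bucketed `separated_items`;
+            // a completed search renders the fuzzy matches with their highlight
+            // positions instead, and an in-flight search renders nothing.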
SearchState::Empty => self + .separated_items + .get(range) + .iter() + .flat_map(|items| { + items + .iter() + .map(|item| self.render_list_item(item, vec![], cx)) + }) + .collect(), + SearchState::Searched { matches, .. } => matches[range] + .iter() + .filter_map(|m| { + let entry = self.all_entries.get(m.candidate_id)?; + Some(self.render_history_entry( + entry, + EntryTimeFormat::DateAndTime, + m.candidate_id, + m.positions.clone(), + cx, + )) + }) + .collect(), + SearchState::Searching { .. } => { + vec![] + } + } + } + + fn render_list_item( + &self, + item: &ListItemType, + highlight_positions: Vec, + cx: &Context, + ) -> AnyElement { + match item { + ListItemType::Entry { index, format } => match self.all_entries.get(*index) { + Some(entry) => self + .render_history_entry(entry, *format, *index, highlight_positions, cx) + .into_any(), + None => Empty.into_any_element(), + }, + ListItemType::BucketSeparator(bucket) => div() + .px(DynamicSpacing::Base06.rems(cx)) + .pt_2() + .pb_1() + .child( + Label::new(bucket.to_string()) + .size(LabelSize::XSmall) + .color(Color::Muted), + ) + .into_any_element(), + } + } + + fn render_history_entry( + &self, + entry: &HistoryEntry, + format: EntryTimeFormat, + list_entry_ix: usize, + highlight_positions: Vec, + cx: &Context, + ) -> AnyElement { + let selected = list_entry_ix == self.selected_index; + let hovered = Some(list_entry_ix) == self.hovered_index; + let timestamp = entry.updated_at().timestamp(); + let thread_timestamp = format.format_timestamp(timestamp, self.local_timezone); + + h_flex() + .w_full() + .pb_1() + .child( + ListItem::new(list_entry_ix) + .rounded() + .toggle_state(selected) + .spacing(ListItemSpacing::Sparse) + .start_slot( + h_flex() + .w_full() + .gap_2() + .justify_between() + .child( + HighlightedLabel::new(entry.title(), highlight_positions) + .size(LabelSize::Small) + .truncate(), + ) + .child( + Label::new(thread_timestamp) + .color(Color::Muted) + .size(LabelSize::XSmall), + ), + ) + .on_hover(cx.listener(move |this, is_hovered, _window, cx| { + if *is_hovered { + this.hovered_index = Some(list_entry_ix); + } else if this.hovered_index == Some(list_entry_ix) { + this.hovered_index = None; + } + + cx.notify(); + })) + .end_slot::(if hovered || selected { + Some( + IconButton::new("delete", IconName::Trash) + .shape(IconButtonShape::Square) + .icon_size(IconSize::XSmall) + .icon_color(Color::Muted) + .tooltip(move |window, cx| { + Tooltip::for_action("Delete", &RemoveSelectedThread, window, cx) + }) + .on_click(cx.listener(move |this, _, _, cx| { + this.remove_thread(list_entry_ix, cx) + })), + ) + } else { + None + }) + .on_click( + cx.listener(move |this, _, _, cx| this.confirm_entry(list_entry_ix, cx)), + ), + ) + .into_any_element() + } +} + +impl Focusable for AcpThreadHistory { + fn focus_handle(&self, cx: &App) -> FocusHandle { + self.search_editor.focus_handle(cx) + } +} + +impl Render for AcpThreadHistory { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + v_flex() + .key_context("ThreadHistory") + .size_full() + .on_action(cx.listener(Self::select_previous)) + .on_action(cx.listener(Self::select_next)) + .on_action(cx.listener(Self::select_first)) + .on_action(cx.listener(Self::select_last)) + .on_action(cx.listener(Self::confirm)) + .on_action(cx.listener(Self::remove_selected_thread)) + .when(!self.all_entries.is_empty(), |parent| { + parent.child( + h_flex() + .h(px(41.)) // Match the toolbar perfectly + .w_full() + .py_1() + .px_2() + .gap_2() + .justify_between() 
+ .border_b_1() + .border_color(cx.theme().colors().border) + .child( + Icon::new(IconName::MagnifyingGlass) + .color(Color::Muted) + .size(IconSize::Small), + ) + .child(self.search_editor.clone()), + ) + }) + .child({ + let view = v_flex() + .id("list-container") + .relative() + .overflow_hidden() + .flex_grow(); + + if self.all_entries.is_empty() { + view.justify_center() + .child( + h_flex().w_full().justify_center().child( + Label::new("You don't have any past threads yet.") + .size(LabelSize::Small), + ), + ) + } else if self.search_produced_no_matches() { + view.justify_center().child( + h_flex().w_full().justify_center().child( + Label::new("No threads match your search.").size(LabelSize::Small), + ), + ) + } else { + view.pr_5() + .child( + uniform_list( + "thread-history", + self.list_item_count(), + cx.processor(|this, range: Range, window, cx| { + this.list_items(range, window, cx) + }), + ) + .p_1() + .track_scroll(self.scroll_handle.clone()) + .flex_grow(), + ) + .when_some(self.render_scrollbar(cx), |div, scrollbar| { + div.child(scrollbar) + }) + } + }) + } +} + +#[derive(IntoElement)] +pub struct AcpHistoryEntryElement { + entry: HistoryEntry, + thread_view: WeakEntity, + selected: bool, + hovered: bool, + on_hover: Box, +} + +impl AcpHistoryEntryElement { + pub fn new(entry: HistoryEntry, thread_view: WeakEntity) -> Self { + Self { + entry, + thread_view, + selected: false, + hovered: false, + on_hover: Box::new(|_, _, _| {}), + } + } + + pub fn hovered(mut self, hovered: bool) -> Self { + self.hovered = hovered; + self + } + + pub fn on_hover(mut self, on_hover: impl Fn(&bool, &mut Window, &mut App) + 'static) -> Self { + self.on_hover = Box::new(on_hover); + self + } +} + +impl RenderOnce for AcpHistoryEntryElement { + fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement { + let id = self.entry.id(); + let title = self.entry.title(); + let timestamp = self.entry.updated_at(); + + let formatted_time = { + let now = chrono::Utc::now(); + let duration = now.signed_duration_since(timestamp); + + if duration.num_days() > 0 { + format!("{}d", duration.num_days()) + } else if duration.num_hours() > 0 { + format!("{}h ago", duration.num_hours()) + } else if duration.num_minutes() > 0 { + format!("{}m ago", duration.num_minutes()) + } else { + "Just now".to_string() + } + }; + + ListItem::new(id) + .rounded() + .toggle_state(self.selected) + .spacing(ListItemSpacing::Sparse) + .start_slot( + h_flex() + .w_full() + .gap_2() + .justify_between() + .child(Label::new(title).size(LabelSize::Small).truncate()) + .child( + Label::new(formatted_time) + .color(Color::Muted) + .size(LabelSize::XSmall), + ), + ) + .on_hover(self.on_hover) + .end_slot::(if self.hovered || self.selected { + Some( + IconButton::new("delete", IconName::Trash) + .shape(IconButtonShape::Square) + .icon_size(IconSize::XSmall) + .icon_color(Color::Muted) + .tooltip(move |window, cx| { + Tooltip::for_action("Delete", &RemoveSelectedThread, window, cx) + }) + .on_click({ + let thread_view = self.thread_view.clone(); + let entry = self.entry.clone(); + + move |_event, _window, cx| { + if let Some(thread_view) = thread_view.upgrade() { + thread_view.update(cx, |thread_view, cx| { + thread_view.delete_history_entry(entry.clone(), cx); + }); + } + } + }), + ) + } else { + None + }) + .on_click({ + let thread_view = self.thread_view.clone(); + let entry = self.entry; + + move |_event, window, cx| { + if let Some(workspace) = thread_view + .upgrade() + .and_then(|view| 
view.read(cx).workspace().upgrade()) + { + match &entry { + HistoryEntry::AcpThread(thread_metadata) => { + if let Some(panel) = workspace.read(cx).panel::(cx) { + panel.update(cx, |panel, cx| { + panel.load_agent_thread( + thread_metadata.clone(), + window, + cx, + ); + }); + } + } + HistoryEntry::TextThread(context) => { + if let Some(panel) = workspace.read(cx).panel::(cx) { + panel.update(cx, |panel, cx| { + panel + .open_saved_prompt_editor( + context.path.clone(), + window, + cx, + ) + .detach_and_log_err(cx); + }); + } + } + } + } + } + }) + } +} + +#[derive(Clone, Copy)] +pub enum EntryTimeFormat { + DateAndTime, + TimeOnly, +} + +impl EntryTimeFormat { + fn format_timestamp(&self, timestamp: i64, timezone: UtcOffset) -> String { + let timestamp = OffsetDateTime::from_unix_timestamp(timestamp).unwrap(); + + match self { + EntryTimeFormat::DateAndTime => time_format::format_localized_timestamp( + timestamp, + OffsetDateTime::now_utc(), + timezone, + time_format::TimestampFormat::EnhancedAbsolute, + ), + EntryTimeFormat::TimeOnly => time_format::format_time(timestamp), + } + } +} + +impl From for EntryTimeFormat { + fn from(bucket: TimeBucket) -> Self { + match bucket { + TimeBucket::Today => EntryTimeFormat::TimeOnly, + TimeBucket::Yesterday => EntryTimeFormat::TimeOnly, + TimeBucket::ThisWeek => EntryTimeFormat::DateAndTime, + TimeBucket::PastWeek => EntryTimeFormat::DateAndTime, + TimeBucket::All => EntryTimeFormat::DateAndTime, + } + } +} + +#[derive(PartialEq, Eq, Clone, Copy, Debug)] +enum TimeBucket { + Today, + Yesterday, + ThisWeek, + PastWeek, + All, +} + +impl TimeBucket { + fn from_dates(reference: NaiveDate, date: NaiveDate) -> Self { + if date == reference { + return TimeBucket::Today; + } + + if date == reference - TimeDelta::days(1) { + return TimeBucket::Yesterday; + } + + let week = date.iso_week(); + + if reference.iso_week() == week { + return TimeBucket::ThisWeek; + } + + let last_week = (reference - TimeDelta::days(7)).iso_week(); + + if week == last_week { + return TimeBucket::PastWeek; + } + + TimeBucket::All + } +} + +impl Display for TimeBucket { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + TimeBucket::Today => write!(f, "Today"), + TimeBucket::Yesterday => write!(f, "Yesterday"), + TimeBucket::ThisWeek => write!(f, "This Week"), + TimeBucket::PastWeek => write!(f, "Past Week"), + TimeBucket::All => write!(f, "All"), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use chrono::NaiveDate; + + #[test] + fn test_time_bucket_from_dates() { + let today = NaiveDate::from_ymd_opt(2023, 1, 15).unwrap(); + + let date = today; + assert_eq!(TimeBucket::from_dates(today, date), TimeBucket::Today); + + let date = NaiveDate::from_ymd_opt(2023, 1, 14).unwrap(); + assert_eq!(TimeBucket::from_dates(today, date), TimeBucket::Yesterday); + + let date = NaiveDate::from_ymd_opt(2023, 1, 13).unwrap(); + assert_eq!(TimeBucket::from_dates(today, date), TimeBucket::ThisWeek); + + let date = NaiveDate::from_ymd_opt(2023, 1, 11).unwrap(); + assert_eq!(TimeBucket::from_dates(today, date), TimeBucket::ThisWeek); + + let date = NaiveDate::from_ymd_opt(2023, 1, 8).unwrap(); + assert_eq!(TimeBucket::from_dates(today, date), TimeBucket::PastWeek); + + let date = NaiveDate::from_ymd_opt(2023, 1, 5).unwrap(); + assert_eq!(TimeBucket::from_dates(today, date), TimeBucket::PastWeek); + + // All: not in this week or last week + let date = NaiveDate::from_ymd_opt(2023, 1, 1).unwrap(); + assert_eq!(TimeBucket::from_dates(today, date), 
TimeBucket::All); + + // Test year boundary cases + let new_year = NaiveDate::from_ymd_opt(2023, 1, 1).unwrap(); + + let date = NaiveDate::from_ymd_opt(2022, 12, 31).unwrap(); + assert_eq!( + TimeBucket::from_dates(new_year, date), + TimeBucket::Yesterday + ); + + let date = NaiveDate::from_ymd_opt(2022, 12, 28).unwrap(); + assert_eq!(TimeBucket::from_dates(new_year, date), TimeBucket::ThisWeek); + } +} diff --git a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/acp/thread_view.rs index 3d1fbba45d763fcf293844a67bd9fc3ab9fbe26b..7e330b7e6f0f894542e7b8f1d52841b093b8715e 100644 --- a/crates/agent_ui/src/acp/thread_view.rs +++ b/crates/agent_ui/src/acp/thread_view.rs @@ -1,75 +1,267 @@ +use acp_thread::{ + AcpThread, AcpThreadEvent, AgentThreadEntry, AssistantMessage, AssistantMessageChunk, + AuthRequired, LoadError, MentionUri, RetryStatus, ThreadStatus, ToolCall, ToolCallContent, + ToolCallStatus, UserMessageId, +}; use acp_thread::{AgentConnection, Plan}; -use agent_servers::AgentServer; -use agent_settings::{AgentSettings, NotifyWhenAgentWaiting}; +use action_log::ActionLog; +use agent_client_protocol::{self as acp}; +use agent_servers::{AgentServer, ClaudeCode}; +use agent_settings::{AgentProfileId, AgentSettings, CompletionMode, NotifyWhenAgentWaiting}; +use agent2::{DbThreadMetadata, HistoryEntry, HistoryEntryId, HistoryStore}; +use anyhow::bail; use audio::{Audio, Sound}; -use std::cell::RefCell; -use std::collections::BTreeMap; -use std::path::Path; -use std::process::ExitStatus; -use std::rc::Rc; -use std::sync::Arc; -use std::time::Duration; - -use agent_client_protocol as acp; -use assistant_tool::ActionLog; use buffer_diff::BufferDiff; +use client::zed_urls; use collections::{HashMap, HashSet}; -use editor::{ - AnchorRangeExt, ContextMenuOptions, ContextMenuPlacement, Editor, EditorElement, EditorMode, - EditorStyle, MinimapVisibility, MultiBuffer, PathKey, -}; +use editor::scroll::Autoscroll; +use editor::{Editor, EditorMode, MultiBuffer, PathKey, SelectionEffects}; use file_icons::FileIcons; +use fs::Fs; use gpui::{ - Action, Animation, AnimationExt, App, BorderStyle, EdgesRefinement, Empty, Entity, EntityId, - FocusHandle, Focusable, Hsla, Length, ListOffset, ListState, MouseButton, PlatformDisplay, - SharedString, Stateful, StyleRefinement, Subscription, Task, TextStyle, TextStyleRefinement, - Transformation, UnderlineStyle, WeakEntity, Window, WindowHandle, div, linear_color_stop, - linear_gradient, list, percentage, point, prelude::*, pulsating_between, + Action, Animation, AnimationExt, AnyView, App, BorderStyle, ClickEvent, ClipboardItem, + EdgesRefinement, ElementId, Empty, Entity, FocusHandle, Focusable, Hsla, Length, ListOffset, + ListState, MouseButton, PlatformDisplay, SharedString, Stateful, StyleRefinement, Subscription, + Task, TextStyle, TextStyleRefinement, Transformation, UnderlineStyle, WeakEntity, Window, + WindowHandle, div, ease_in_out, linear_color_stop, linear_gradient, list, percentage, point, + prelude::*, pulsating_between, }; -use language::language_settings::SoftWrap; -use language::{Buffer, Language}; +use language::Buffer; + +use language_model::LanguageModelRegistry; use markdown::{HeadingLevelStyles, Markdown, MarkdownElement, MarkdownStyle}; -use parking_lot::Mutex; -use project::Project; +use project::{Project, ProjectEntryId}; +use prompt_store::{PromptId, PromptStore}; +use rope::Point; use settings::{Settings as _, SettingsStore}; -use text::{Anchor, BufferSnapshot}; +use std::sync::Arc; +use std::time::Instant; +use 
std::{collections::BTreeMap, rc::Rc, time::Duration}; +use text::Anchor; use theme::ThemeSettings; use ui::{ - Disclosure, Divider, DividerColor, KeyBinding, Scrollbar, ScrollbarState, Tooltip, prelude::*, + Callout, Disclosure, Divider, DividerColor, ElevationIndex, KeyBinding, PopoverMenuHandle, + Scrollbar, ScrollbarState, Tooltip, prelude::*, }; -use util::ResultExt; +use util::{ResultExt, size::format_file_size, time::duration_alt_display}; use workspace::{CollaboratorId, Workspace}; -use zed_actions::agent::{Chat, NextHistoryMessage, PreviousHistoryMessage}; - -use ::acp_thread::{ - AcpThread, AcpThreadEvent, AgentThreadEntry, AssistantMessage, AssistantMessageChunk, Diff, - LoadError, MentionPath, ThreadStatus, ToolCall, ToolCallContent, ToolCallStatus, -}; +use zed_actions::agent::{Chat, ToggleModelSelector}; +use zed_actions::assistant::OpenRulesLibrary; -use crate::acp::completion_provider::{ContextPickerCompletionProvider, MentionSet}; -use crate::acp::message_history::MessageHistory; +use super::entry_view_state::EntryViewState; +use crate::acp::AcpModelSelectorPopover; +use crate::acp::entry_view_state::{EntryViewEvent, ViewEvent}; +use crate::acp::message_editor::{MessageEditor, MessageEditorEvent}; use crate::agent_diff::AgentDiff; -use crate::message_editor::{MAX_EDITOR_LINES, MIN_EDITOR_LINES}; -use crate::ui::{AgentNotification, AgentNotificationEvent}; +use crate::profile_selector::{ProfileProvider, ProfileSelector}; + +use crate::ui::preview::UsageCallout; +use crate::ui::{AgentNotification, AgentNotificationEvent, BurnModeTooltip}; use crate::{ - AgentDiffPane, AgentPanel, ExpandMessageEditor, Follow, KeepAll, OpenAgentDiff, RejectAll, + AgentDiffPane, AgentPanel, ContinueThread, ContinueWithBurnMode, ExpandMessageEditor, Follow, + KeepAll, OpenAgentDiff, OpenHistory, RejectAll, ToggleBurnMode, ToggleProfileSelector, }; const RESPONSE_PADDING_X: Pixels = px(19.); +pub const MIN_EDITOR_LINES: usize = 4; +pub const MAX_EDITOR_LINES: usize = 8; + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +enum ThreadFeedback { + Positive, + Negative, +} + +enum ThreadError { + PaymentRequired, + ModelRequestLimitReached(cloud_llm_client::Plan), + ToolUseLimitReached, + AuthenticationRequired(SharedString), + Other(SharedString), +} + +impl ThreadError { + fn from_err(error: anyhow::Error, agent: &Rc) -> Self { + if error.is::() { + Self::PaymentRequired + } else if error.is::() { + Self::ToolUseLimitReached + } else if let Some(error) = + error.downcast_ref::() + { + Self::ModelRequestLimitReached(error.plan) + } else { + let string = error.to_string(); + // TODO: we should have Gemini return better errors here. 
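+            // Until then, fall back to substring checks on the error text to detect
+            // authentication failures.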
+ if agent.clone().downcast::().is_some() + && string.contains("Could not load the default credentials") + || string.contains("API key not valid") + || string.contains("Request had invalid authentication credentials") + { + Self::AuthenticationRequired(string.into()) + } else { + Self::Other(error.to_string().into()) + } + } + } +} + +impl ProfileProvider for Entity { + fn profile_id(&self, cx: &App) -> AgentProfileId { + self.read(cx).profile().clone() + } + + fn set_profile(&self, profile_id: AgentProfileId, cx: &mut App) { + self.update(cx, |thread, _cx| { + thread.set_profile(profile_id); + }); + } + + fn profiles_supported(&self, cx: &App) -> bool { + self.read(cx) + .model() + .is_some_and(|model| model.supports_tools()) + } +} + +#[derive(Default)] +struct ThreadFeedbackState { + feedback: Option, + comments_editor: Option>, +} + +impl ThreadFeedbackState { + pub fn submit( + &mut self, + thread: Entity, + feedback: ThreadFeedback, + window: &mut Window, + cx: &mut App, + ) { + let Some(telemetry) = thread.read(cx).connection().telemetry() else { + return; + }; + + if self.feedback == Some(feedback) { + return; + } + + self.feedback = Some(feedback); + match feedback { + ThreadFeedback::Positive => { + self.comments_editor = None; + } + ThreadFeedback::Negative => { + self.comments_editor = Some(Self::build_feedback_comments_editor(window, cx)); + } + } + let session_id = thread.read(cx).session_id().clone(); + let agent_name = telemetry.agent_name(); + let task = telemetry.thread_data(&session_id, cx); + let rating = match feedback { + ThreadFeedback::Positive => "positive", + ThreadFeedback::Negative => "negative", + }; + cx.background_spawn(async move { + let thread = task.await?; + telemetry::event!( + "Agent Thread Rated", + session_id = session_id, + rating = rating, + agent = agent_name, + thread = thread + ); + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + + pub fn submit_comments(&mut self, thread: Entity, cx: &mut App) { + let Some(telemetry) = thread.read(cx).connection().telemetry() else { + return; + }; + + let Some(comments) = self + .comments_editor + .as_ref() + .map(|editor| editor.read(cx).text(cx)) + .filter(|text| !text.trim().is_empty()) + else { + return; + }; + + self.comments_editor.take(); + + let session_id = thread.read(cx).session_id().clone(); + let agent_name = telemetry.agent_name(); + let task = telemetry.thread_data(&session_id, cx); + cx.background_spawn(async move { + let thread = task.await?; + telemetry::event!( + "Agent Thread Feedback Comments", + session_id = session_id, + comments = comments, + agent = agent_name, + thread = thread + ); + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + + pub fn clear(&mut self) { + *self = Self::default() + } + + pub fn dismiss_comments(&mut self) { + self.comments_editor.take(); + } + + fn build_feedback_comments_editor(window: &mut Window, cx: &mut App) -> Entity { + let buffer = cx.new(|cx| { + let empty_string = String::new(); + MultiBuffer::singleton(cx.new(|cx| Buffer::local(empty_string, cx)), cx) + }); + + let editor = cx.new(|cx| { + let mut editor = Editor::new( + editor::EditorMode::AutoHeight { + min_lines: 1, + max_lines: Some(4), + }, + buffer, + None, + window, + cx, + ); + editor.set_placeholder_text( + "What went wrong? 
Share your feedback so we can improve.", + cx, + ); + editor + }); + + editor.read(cx).focus_handle(cx).focus(window); + editor + } +} pub struct AcpThreadView { agent: Rc, workspace: WeakEntity, project: Entity, thread_state: ThreadState, - diff_editors: HashMap>, - message_editor: Entity, - message_set_from_history: Option, - _message_editor_subscription: Subscription, - mention_set: Arc>, + history_store: Entity, + hovered_recent_history_item: Option, + entry_view_state: Entity, + message_editor: Entity, + model_selector: Option>, + profile_selector: Option>, notifications: Vec>, notification_subscriptions: HashMap, Vec>, - last_error: Option>, + thread_retry_status: Option, + thread_error: Option, + thread_feedback: ThreadFeedbackState, list_state: ListState, scrollbar_state: ScrollbarState, auth_task: Option>, @@ -78,9 +270,9 @@ pub struct AcpThreadView { edits_expanded: bool, plan_expanded: bool, editor_expanded: bool, - message_history: Rc>>>, + editing_message: Option, _cancel_task: Option>, - _subscriptions: [Subscription; 1], + _subscriptions: [Subscription; 3], } enum ThreadState { @@ -94,122 +286,98 @@ enum ThreadState { LoadError(LoadError), Unauthenticated { connection: Rc, - }, - ServerExited { - status: ExitStatus, + description: Option>, + configuration_view: Option, + pending_auth_method: Option, + _subscription: Option, }, } impl AcpThreadView { pub fn new( agent: Rc, + resume_thread: Option, + summarize_thread: Option, workspace: WeakEntity, project: Entity, - message_history: Rc>>>, - min_lines: usize, - max_lines: Option, + history_store: Entity, + prompt_store: Option>, window: &mut Window, cx: &mut Context, ) -> Self { - let language = Language::new( - language::LanguageConfig { - completion_query_characters: HashSet::from_iter(['.', '-', '_', '@']), - ..Default::default() - }, - None, - ); - - let mention_set = Arc::new(Mutex::new(MentionSet::default())); - + let prevent_slash_commands = agent.clone().downcast::().is_some(); let message_editor = cx.new(|cx| { - let buffer = cx.new(|cx| Buffer::local("", cx).with_language(Arc::new(language), cx)); - let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); - - let mut editor = Editor::new( + let mut editor = MessageEditor::new( + workspace.clone(), + project.clone(), + history_store.clone(), + prompt_store.clone(), + "Message the agent — @ to include context", + prevent_slash_commands, editor::EditorMode::AutoHeight { - min_lines, - max_lines: max_lines, + min_lines: MIN_EDITOR_LINES, + max_lines: Some(MAX_EDITOR_LINES), }, - buffer, - None, window, cx, ); - editor.set_placeholder_text("Message the agent - @ to include files", cx); - editor.set_show_indent_guides(false, cx); - editor.set_soft_wrap(); - editor.set_use_modal_editing(true); - editor.set_completion_provider(Some(Rc::new(ContextPickerCompletionProvider::new( - mention_set.clone(), - workspace.clone(), - cx.weak_entity(), - )))); - editor.set_context_menu_options(ContextMenuOptions { - min_entries_visible: 12, - max_entries_visible: 12, - placement: Some(ContextMenuPlacement::Above), - }); + if let Some(entry) = summarize_thread { + editor.insert_thread_summary(entry, window, cx); + } editor }); - let message_editor_subscription = - cx.subscribe(&message_editor, |this, editor, event, cx| { - if let editor::EditorEvent::BufferEdited = &event { - let buffer = editor - .read(cx) - .buffer() - .read(cx) - .as_singleton() - .unwrap() - .read(cx) - .snapshot(); - if let Some(message) = this.message_set_from_history.clone() - && message.version() != 
buffer.version() - { - this.message_set_from_history = None; - } - - if this.message_set_from_history.is_none() { - this.message_history.borrow_mut().reset_position(); - } - } - }); - - let mention_set = mention_set.clone(); - let list_state = ListState::new(0, gpui::ListAlignment::Bottom, px(2048.0)); - let subscription = cx.observe_global_in::(window, Self::settings_changed); + let entry_view_state = cx.new(|_| { + EntryViewState::new( + workspace.clone(), + project.clone(), + history_store.clone(), + prompt_store.clone(), + prevent_slash_commands, + ) + }); + + let subscriptions = [ + cx.observe_global_in::(window, Self::settings_changed), + cx.subscribe_in(&message_editor, window, Self::handle_message_editor_event), + cx.subscribe_in(&entry_view_state, window, Self::handle_entry_view_event), + ]; Self { agent: agent.clone(), workspace: workspace.clone(), project: project.clone(), - thread_state: Self::initial_state(agent, workspace, project, window, cx), + entry_view_state, + thread_state: Self::initial_state(agent, resume_thread, workspace, project, window, cx), message_editor, - message_set_from_history: None, - _message_editor_subscription: message_editor_subscription, - mention_set, + model_selector: None, + profile_selector: None, notifications: Vec::new(), notification_subscriptions: HashMap::default(), - diff_editors: Default::default(), list_state: list_state.clone(), scrollbar_state: ScrollbarState::new(list_state).parent_entity(&cx.entity()), - last_error: None, + thread_retry_status: None, + thread_error: None, + thread_feedback: Default::default(), auth_task: None, expanded_tool_calls: HashSet::default(), expanded_thinking_blocks: HashSet::default(), + editing_message: None, edits_expanded: false, plan_expanded: false, editor_expanded: false, - message_history, - _subscriptions: [subscription], + history_store, + hovered_recent_history_item: None, + _subscriptions: subscriptions, _cancel_task: None, } } fn initial_state( agent: Rc, + resume_thread: Option, workspace: WeakEntity, project: Entity, window: &mut Window, @@ -236,39 +404,42 @@ impl AcpThreadView { } }; - // this.update_in(cx, |_this, _window, cx| { - // let status = connection.exit_status(cx); - // cx.spawn(async move |this, cx| { - // let status = status.await.ok(); - // this.update(cx, |this, cx| { - // this.thread_state = ThreadState::ServerExited { status }; - // cx.notify(); - // }) - // .ok(); - // }) - // .detach(); - // }) - // .ok(); - - let result = match connection + let result = if let Some(native_agent) = connection .clone() - .new_thread(project.clone(), &root_dir, cx) - .await + .downcast::() + && let Some(resume) = resume_thread.clone() { - Err(e) => { - let mut cx = cx.clone(); - if e.is::() { - this.update(&mut cx, |this, cx| { - this.thread_state = ThreadState::Unauthenticated { connection }; - cx.notify(); + cx.update(|_, cx| { + native_agent + .0 + .update(cx, |agent, cx| agent.open_thread(resume.id, cx)) + }) + .log_err() + } else { + cx.update(|_, cx| { + connection + .clone() + .new_thread(project.clone(), &root_dir, cx) + }) + .log_err() + }; + + let Some(result) = result else { + return; + }; + + let result = match result.await { + Err(e) => match e.downcast::() { + Ok(err) => { + cx.update(|window, cx| { + Self::handle_auth_required(this, err, agent, connection, window, cx) }) - .ok(); + .log_err(); return; - } else { - Err(e) } - } - Ok(session_id) => Ok(session_id), + Err(err) => Err(err), + }, + Ok(thread) => Ok(thread), }; this.update_in(cx, |this, window, cx| { @@ -281,16 +452,64 
@@ impl AcpThreadView { let action_log_subscription = cx.observe(&action_log, |_, _, cx| cx.notify()); - this.list_state - .splice(0..0, thread.read(cx).entries().len()); + let count = thread.read(cx).entries().len(); + this.list_state.splice(0..0, count); + this.entry_view_state.update(cx, |view_state, cx| { + for ix in 0..count { + view_state.sync_entry(ix, &thread, window, cx); + } + }); + + if let Some(resume) = resume_thread { + this.history_store.update(cx, |history, cx| { + history.push_recently_opened_entry( + HistoryEntryId::AcpThread(resume.id), + cx, + ); + }); + } AgentDiff::set_active_thread(&workspace, thread.clone(), window, cx); + this.model_selector = + thread + .read(cx) + .connection() + .model_selector() + .map(|selector| { + cx.new(|cx| { + AcpModelSelectorPopover::new( + thread.read(cx).session_id().clone(), + selector, + PopoverMenuHandle::default(), + this.focus_handle(cx), + window, + cx, + ) + }) + }); + this.thread_state = ThreadState::Ready { thread, _subscription: [thread_subscription, action_log_subscription], }; + this.profile_selector = this.as_native_thread(cx).map(|thread| { + cx.new(|cx| { + ProfileSelector::new( + ::global(cx), + Arc::new(thread.clone()), + this.focus_handle(cx), + cx, + ) + }) + }); + + this.message_editor.update(cx, |message_editor, _cx| { + message_editor + .set_prompt_capabilities(connection.prompt_capabilities()); + }); + cx.notify(); } Err(err) => { @@ -304,6 +523,70 @@ impl AcpThreadView { ThreadState::Loading { _task: load_task } } + fn handle_auth_required( + this: WeakEntity, + err: AuthRequired, + agent: Rc, + connection: Rc, + window: &mut Window, + cx: &mut App, + ) { + let agent_name = agent.name(); + let (configuration_view, subscription) = if let Some(provider_id) = err.provider_id { + let registry = LanguageModelRegistry::global(cx); + + let sub = window.subscribe(®istry, cx, { + let provider_id = provider_id.clone(); + let this = this.clone(); + move |_, ev, window, cx| { + if let language_model::Event::ProviderStateChanged(updated_provider_id) = &ev + && &provider_id == updated_provider_id + { + this.update(cx, |this, cx| { + this.thread_state = Self::initial_state( + agent.clone(), + None, + this.workspace.clone(), + this.project.clone(), + window, + cx, + ); + cx.notify(); + }) + .ok(); + } + } + }); + + let view = registry.read(cx).provider(&provider_id).map(|provider| { + provider.configuration_view( + language_model::ConfigurationViewTargetAgent::Other(agent_name), + window, + cx, + ) + }); + + (view, Some(sub)) + } else { + (None, None) + }; + + this.update(cx, |this, cx| { + this.thread_state = ThreadState::Unauthenticated { + pending_auth_method: None, + connection, + configuration_view, + description: err + .description + .clone() + .map(|desc| cx.new(|cx| Markdown::new(desc.into(), None, None, cx))), + _subscription: subscription, + }; + cx.notify(); + }) + .ok(); + } + fn handle_load_error(&mut self, err: anyhow::Error, cx: &mut Context) { if let Some(load_err) = err.downcast_ref::() { self.thread_state = ThreadState::LoadError(load_err.clone()); @@ -313,13 +596,16 @@ impl AcpThreadView { cx.notify(); } + pub fn workspace(&self) -> &WeakEntity { + &self.workspace + } + pub fn thread(&self) -> Option<&Entity> { match &self.thread_state { ThreadState::Ready { thread, .. } => Some(thread), ThreadState::Unauthenticated { .. } | ThreadState::Loading { .. } - | ThreadState::LoadError(..) - | ThreadState::ServerExited { .. } => None, + | ThreadState::LoadError { .. 
} => None, } } @@ -328,13 +614,13 @@ impl AcpThreadView { ThreadState::Ready { thread, .. } => thread.read(cx).title(), ThreadState::Loading { .. } => "Loading…".into(), ThreadState::LoadError(_) => "Failed to load".into(), - ThreadState::Unauthenticated { .. } => "Not authenticated".into(), - ThreadState::ServerExited { .. } => "Server exited unexpectedly".into(), + ThreadState::Unauthenticated { .. } => "Authentication Required".into(), } } - pub fn cancel(&mut self, cx: &mut Context) { - self.last_error.take(); + pub fn cancel_generation(&mut self, cx: &mut Context) { + self.thread_error.take(); + self.thread_retry_status.take(); if let Some(thread) = self.thread() { self._cancel_task = Some(thread.update(cx, |thread, cx| thread.cancel(cx))); @@ -353,167 +639,246 @@ impl AcpThreadView { fn set_editor_is_expanded(&mut self, is_expanded: bool, cx: &mut Context) { self.editor_expanded = is_expanded; - self.message_editor.update(cx, |editor, _| { - if self.editor_expanded { - editor.set_mode(EditorMode::Full { - scale_ui_elements_with_buffer_font_size: false, - show_active_line_background: false, - sized_by_content: false, - }) + self.message_editor.update(cx, |editor, cx| { + if is_expanded { + editor.set_mode( + EditorMode::Full { + scale_ui_elements_with_buffer_font_size: false, + show_active_line_background: false, + sized_by_content: false, + }, + cx, + ) } else { - editor.set_mode(EditorMode::AutoHeight { - min_lines: MIN_EDITOR_LINES, - max_lines: Some(MAX_EDITOR_LINES), - }) + editor.set_mode( + EditorMode::AutoHeight { + min_lines: MIN_EDITOR_LINES, + max_lines: Some(MAX_EDITOR_LINES), + }, + cx, + ) } }); cx.notify(); } - fn chat(&mut self, _: &Chat, window: &mut Window, cx: &mut Context) { - self.last_error.take(); - - let mut ix = 0; - let mut chunks: Vec = Vec::new(); - let project = self.project.clone(); - self.message_editor.update(cx, |editor, cx| { - let text = editor.text(cx); - editor.display_map.update(cx, |map, cx| { - let snapshot = map.snapshot(cx); - for (crease_id, crease) in snapshot.crease_snapshot.creases() { - // Skip creases that have been edited out of the message buffer. 
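// A minimal, std-only sketch of the interleaving performed by the chat() path being
// removed here: the editor text is split around each mention range, and each mention is
// emitted as a resource link between the surrounding text slices. `ContentChunk` and
// `Mention` are simplified stand-ins, not the acp types used in this file; mentions are
// assumed to be sorted and non-overlapping.
#[derive(Debug)]
enum ContentChunk {
    Text(String),
    ResourceLink { uri: String },
}

struct Mention {
    range: std::ops::Range<usize>, // byte range of the mention within `text`
    uri: String,                   // absolute path of the mentioned file, as a URI
}

fn interleave_chunks(text: &str, mentions: &[Mention]) -> Vec<ContentChunk> {
    let mut chunks = Vec::new();
    let mut ix = 0;
    for mention in mentions {
        if mention.range.start > ix {
            // Emit the plain text that precedes this mention.
            chunks.push(ContentChunk::Text(text[ix..mention.range.start].to_string()));
        }
        chunks.push(ContentChunk::ResourceLink { uri: mention.uri.clone() });
        ix = mention.range.end;
    }
    // Emit any trailing, non-empty text after the last mention.
    let trailing = text[ix..].trim();
    if !trailing.is_empty() {
        chunks.push(ContentChunk::Text(trailing.to_string()));
    }
    chunks
}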
- if !crease.range().start.is_valid(&snapshot.buffer_snapshot) { - continue; - } + pub fn handle_message_editor_event( + &mut self, + _: &Entity, + event: &MessageEditorEvent, + window: &mut Window, + cx: &mut Context, + ) { + match event { + MessageEditorEvent::Send => self.send(window, cx), + MessageEditorEvent::Cancel => self.cancel_generation(cx), + MessageEditorEvent::Focus => { + self.cancel_editing(&Default::default(), window, cx); + } + } + } - if let Some(project_path) = - self.mention_set.lock().path_for_crease_id(crease_id) - { - let crease_range = crease.range().to_offset(&snapshot.buffer_snapshot); - if crease_range.start > ix { - chunks.push(text[ix..crease_range.start].into()); - } - if let Some(abs_path) = project.read(cx).absolute_path(&project_path, cx) { - let path_str = abs_path.display().to_string(); - chunks.push(acp::ContentBlock::ResourceLink(acp::ResourceLink { - uri: path_str.clone(), - name: path_str, - annotations: None, - description: None, - mime_type: None, - size: None, - title: None, - })); - } - ix = crease_range.end; - } + pub fn handle_entry_view_event( + &mut self, + _: &Entity, + event: &EntryViewEvent, + window: &mut Window, + cx: &mut Context, + ) { + match &event.view_event { + ViewEvent::NewDiff(tool_call_id) => { + if AgentSettings::get_global(cx).expand_edit_card { + self.expanded_tool_calls.insert(tool_call_id.clone()); } - - if ix < text.len() { - let last_chunk = text[ix..].trim(); - if !last_chunk.is_empty() { - chunks.push(last_chunk.into()); - } + } + ViewEvent::NewTerminal(tool_call_id) => { + if AgentSettings::get_global(cx).expand_terminal_card { + self.expanded_tool_calls.insert(tool_call_id.clone()); } - }) - }); - - if chunks.is_empty() { - return; + } + ViewEvent::MessageEditorEvent(_editor, MessageEditorEvent::Focus) => { + if let Some(thread) = self.thread() + && let Some(AgentThreadEntry::UserMessage(user_message)) = + thread.read(cx).entries().get(event.entry_index) + && user_message.id.is_some() + { + self.editing_message = Some(event.entry_index); + cx.notify(); + } + } + ViewEvent::MessageEditorEvent(editor, MessageEditorEvent::Send) => { + self.regenerate(event.entry_index, editor, window, cx); + } + ViewEvent::MessageEditorEvent(_editor, MessageEditorEvent::Cancel) => { + self.cancel_editing(&Default::default(), window, cx); + } } + } + fn resume_chat(&mut self, cx: &mut Context) { + self.thread_error.take(); let Some(thread) = self.thread() else { return; }; - let task = thread.update(cx, |thread, cx| thread.send(chunks.clone(), cx)); + let task = thread.update(cx, |thread, cx| thread.resume(cx)); cx.spawn(async move |this, cx| { let result = task.await; this.update(cx, |this, cx| { if let Err(err) = result { - this.last_error = - Some(cx.new(|cx| Markdown::new(err.to_string().into(), None, None, cx))) + this.handle_thread_error(err, cx); } }) }) .detach(); + } - let mention_set = self.mention_set.clone(); - - self.set_editor_is_expanded(false, cx); - - self.message_editor.update(cx, |editor, cx| { - editor.clear(window, cx); - editor.remove_creases(mention_set.lock().drain(), cx) + fn send(&mut self, window: &mut Window, cx: &mut Context) { + let Some(thread) = self.thread() else { return }; + self.history_store.update(cx, |history, cx| { + history.push_recently_opened_entry( + HistoryEntryId::AcpThread(thread.read(cx).session_id().clone()), + cx, + ); }); - self.scroll_to_bottom(cx); + if thread.read(cx).status() != ThreadStatus::Idle { + self.stop_current_and_send_new_message(window, cx); + return; + } - 
self.message_history.borrow_mut().push(chunks); + let contents = self + .message_editor + .update(cx, |message_editor, cx| message_editor.contents(window, cx)); + self.send_impl(contents, window, cx) } - fn previous_history_message( - &mut self, - _: &PreviousHistoryMessage, - window: &mut Window, - cx: &mut Context, - ) { - if self.message_set_from_history.is_none() && !self.message_editor.read(cx).is_empty(cx) { - self.message_editor.update(cx, |editor, cx| { - editor.move_up(&Default::default(), window, cx); - }); + fn stop_current_and_send_new_message(&mut self, window: &mut Window, cx: &mut Context) { + let Some(thread) = self.thread().cloned() else { return; - } + }; - self.message_set_from_history = Self::set_draft_message( - self.message_editor.clone(), - self.mention_set.clone(), - self.project.clone(), - self.message_history - .borrow_mut() - .prev() - .map(|blocks| blocks.as_slice()), - window, - cx, - ); + let cancelled = thread.update(cx, |thread, cx| thread.cancel(cx)); + + let contents = self + .message_editor + .update(cx, |message_editor, cx| message_editor.contents(window, cx)); + + cx.spawn_in(window, async move |this, cx| { + cancelled.await; + + this.update_in(cx, |this, window, cx| { + this.send_impl(contents, window, cx); + }) + .ok(); + }) + .detach(); } - fn next_history_message( + fn send_impl( &mut self, - _: &NextHistoryMessage, + contents: Task, Vec>)>>, window: &mut Window, cx: &mut Context, ) { - if self.message_set_from_history.is_none() { - self.message_editor.update(cx, |editor, cx| { - editor.move_down(&Default::default(), window, cx); - }); + self.thread_error.take(); + self.editing_message.take(); + self.thread_feedback.clear(); + + let Some(thread) = self.thread().cloned() else { return; - } + }; + let task = cx.spawn_in(window, async move |this, cx| { + let (contents, tracked_buffers) = contents.await?; - let mut message_history = self.message_history.borrow_mut(); - let next_history = message_history.next(); + if contents.is_empty() { + return Ok(()); + } - let set_draft_message = Self::set_draft_message( - self.message_editor.clone(), - self.mention_set.clone(), - self.project.clone(), - Some( - next_history - .map(|blocks| blocks.as_slice()) - .unwrap_or_else(|| &[]), - ), - window, - cx, - ); - // If we reset the text to an empty string because we ran out of history, - // we don't want to mark it as coming from the history - self.message_set_from_history = if next_history.is_some() { - set_draft_message - } else { - None + this.update_in(cx, |this, window, cx| { + this.set_editor_is_expanded(false, cx); + this.scroll_to_bottom(cx); + this.message_editor.update(cx, |message_editor, cx| { + message_editor.clear(window, cx); + }); + })?; + let send = thread.update(cx, |thread, cx| { + thread.action_log().update(cx, |action_log, cx| { + for buffer in tracked_buffers { + action_log.buffer_read(buffer, cx) + } + }); + thread.send(contents, cx) + })?; + send.await + }); + + cx.spawn(async move |this, cx| { + if let Err(err) = task.await { + this.update(cx, |this, cx| { + this.handle_thread_error(err, cx); + }) + .ok(); + } + }) + .detach(); + } + + fn cancel_editing(&mut self, _: &ClickEvent, window: &mut Window, cx: &mut Context) { + let Some(thread) = self.thread().cloned() else { + return; + }; + + if let Some(index) = self.editing_message.take() + && let Some(editor) = self + .entry_view_state + .read(cx) + .entry(index) + .and_then(|e| e.message_editor()) + .cloned() + { + editor.update(cx, |editor, cx| { + if let Some(user_message) = thread + 
.read(cx) + .entries() + .get(index) + .and_then(|e| e.user_message()) + { + editor.set_message(user_message.chunks.clone(), window, cx); + } + }) + }; + self.focus_handle(cx).focus(window); + cx.notify(); + } + + fn regenerate( + &mut self, + entry_ix: usize, + message_editor: &Entity, + window: &mut Window, + cx: &mut Context, + ) { + let Some(thread) = self.thread().cloned() else { + return; + }; + + let Some(rewind) = thread.update(cx, |thread, cx| { + let user_message_id = thread.entries().get(entry_ix)?.user_message()?.id.clone()?; + Some(thread.rewind(user_message_id, cx)) + }) else { + return; }; + + let contents = + message_editor.update(cx, |message_editor, cx| message_editor.contents(window, cx)); + + let task = cx.foreground_executor().spawn(async move { + rewind.await?; + contents.await + }); + self.send_impl(task, window, cx); } fn open_agent_diff(&mut self, _: &OpenAgentDiff, window: &mut Window, cx: &mut Context) { @@ -539,86 +904,50 @@ impl AcpThreadView { }; diff.update(cx, |diff, cx| { - diff.move_to_path(PathKey::for_buffer(&buffer, cx), window, cx) + diff.move_to_path(PathKey::for_buffer(buffer, cx), window, cx) }) } - fn set_draft_message( - message_editor: Entity, - mention_set: Arc>, - project: Entity, - message: Option<&[acp::ContentBlock]>, - window: &mut Window, - cx: &mut Context, - ) -> Option { - cx.notify(); - - let message = message?; - - let mut text = String::new(); - let mut mentions = Vec::new(); - - for chunk in message { - match chunk { - acp::ContentBlock::Text(text_content) => { - text.push_str(&text_content.text); - } - acp::ContentBlock::ResourceLink(resource_link) => { - let path = Path::new(&resource_link.uri); - let start = text.len(); - let content = MentionPath::new(&path).to_string(); - text.push_str(&content); - let end = text.len(); - if let Some(project_path) = - project.read(cx).project_path_for_absolute_path(&path, cx) - { - let filename: SharedString = path - .file_name() - .unwrap_or_default() - .to_string_lossy() - .to_string() - .into(); - mentions.push((start..end, project_path, filename)); - } + fn handle_open_rules(&mut self, _: &ClickEvent, window: &mut Window, cx: &mut Context) { + let Some(thread) = self.as_native_thread(cx) else { + return; + }; + let project_context = thread.read(cx).project_context().read(cx); + + let project_entry_ids = project_context + .worktrees + .iter() + .flat_map(|worktree| worktree.rules_file.as_ref()) + .map(|rules_file| ProjectEntryId::from_usize(rules_file.project_entry_id)) + .collect::>(); + + self.workspace + .update(cx, move |workspace, cx| { + // TODO: Open a multibuffer instead? In some cases this doesn't make the set of rules + // files clear. For example, if rules file 1 is already open but rules file 2 is not, + // this would open and focus rules file 2 in a tab that is not next to rules file 1. 
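+ // Resolve each rules-file ProjectEntryId back to a ProjectPath and open
+ // every project rules file in its own editor tab.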
+ let project = workspace.project().read(cx); + let project_paths = project_entry_ids + .into_iter() + .flat_map(|entry_id| project.path_for_entry(entry_id, cx)) + .collect::>(); + for project_path in project_paths { + workspace + .open_path(project_path, None, true, window, cx) + .detach_and_log_err(cx); } - acp::ContentBlock::Image(_) - | acp::ContentBlock::Audio(_) - | acp::ContentBlock::Resource(_) => {} - } - } - - let snapshot = message_editor.update(cx, |editor, cx| { - editor.set_text(text, window, cx); - editor.buffer().read(cx).snapshot(cx) - }); - - for (range, project_path, filename) in mentions { - let crease_icon_path = if project_path.path.is_dir() { - FileIcons::get_folder_icon(false, cx) - .unwrap_or_else(|| IconName::Folder.path().into()) - } else { - FileIcons::get_icon(Path::new(project_path.path.as_ref()), cx) - .unwrap_or_else(|| IconName::File.path().into()) - }; + }) + .ok(); + } - let anchor = snapshot.anchor_before(range.start); - let crease_id = crate::context_picker::insert_crease_for_mention( - anchor.excerpt_id, - anchor.text_anchor, - range.end - range.start, - filename, - crease_icon_path, - message_editor.clone(), - window, - cx, - ); - if let Some(crease_id) = crease_id { - mention_set.lock().insert(crease_id, project_path); - } - } + fn handle_thread_error(&mut self, error: anyhow::Error, cx: &mut Context) { + self.thread_error = Some(ThreadError::from_err(error, &self.agent)); + cx.notify(); + } - let snapshot = snapshot.as_singleton().unwrap().2.clone(); - Some(snapshot.text) + fn clear_thread_error(&mut self, cx: &mut Context) { + self.thread_error = None; + cx.notify(); } fn handle_thread_event( @@ -628,22 +957,33 @@ impl AcpThreadView { window: &mut Window, cx: &mut Context, ) { - let count = self.list_state.item_count(); match event { AcpThreadEvent::NewEntry => { - let index = thread.read(cx).entries().len() - 1; - self.sync_thread_entry_view(index, window, cx); - self.list_state.splice(count..count, 1); + let len = thread.read(cx).entries().len(); + let index = len - 1; + self.entry_view_state.update(cx, |view_state, cx| { + view_state.sync_entry(index, thread, window, cx) + }); + self.list_state.splice(index..index, 1); } AcpThreadEvent::EntryUpdated(index) => { - let index = *index; - self.sync_thread_entry_view(index, window, cx); - self.list_state.splice(index..index + 1, 1); + self.entry_view_state.update(cx, |view_state, cx| { + view_state.sync_entry(*index, thread, window, cx) + }); + } + AcpThreadEvent::EntriesRemoved(range) => { + self.entry_view_state + .update(cx, |view_state, _cx| view_state.remove(range.clone())); + self.list_state.splice(range.clone(), 0); } AcpThreadEvent::ToolAuthorizationRequired => { self.notify_with_sound("Waiting for tool confirmation", IconName::Info, window, cx); } + AcpThreadEvent::Retry(retry) => { + self.thread_retry_status = Some(retry.clone()); + } AcpThreadEvent::Stopped => { + self.thread_retry_status.take(); let used_tools = thread.read(cx).used_tools_since_last_user_message(); self.notify_with_sound( if used_tools { @@ -657,6 +997,7 @@ impl AcpThreadView { ); } AcpThreadEvent::Error => { + self.thread_retry_status.take(); self.notify_with_sound( "Agent stopped due to an error", IconName::Warning, @@ -664,89 +1005,89 @@ impl AcpThreadView { cx, ); } - AcpThreadEvent::ServerExited(status) => { - self.thread_state = ThreadState::ServerExited { status: *status }; + AcpThreadEvent::LoadError(error) => { + self.thread_retry_status.take(); + self.thread_state = ThreadState::LoadError(error.clone()); } + 
AcpThreadEvent::TitleUpdated | AcpThreadEvent::TokenUsageUpdated => {} } cx.notify(); } - fn sync_thread_entry_view( + fn authenticate( &mut self, - entry_ix: usize, + method: acp::AuthMethodId, window: &mut Window, cx: &mut Context, ) { - let Some(multibuffers) = self.entry_diff_multibuffers(entry_ix, cx) else { + let ThreadState::Unauthenticated { + connection, + pending_auth_method, + configuration_view, + .. + } = &mut self.thread_state + else { return; }; - let multibuffers = multibuffers.collect::>(); - - for multibuffer in multibuffers { - if self.diff_editors.contains_key(&multibuffer.entity_id()) { + if method.0.as_ref() == "gemini-api-key" { + let registry = LanguageModelRegistry::global(cx); + let provider = registry + .read(cx) + .provider(&language_model::GOOGLE_PROVIDER_ID) + .unwrap(); + if !provider.is_authenticated(cx) { + let this = cx.weak_entity(); + let agent = self.agent.clone(); + let connection = connection.clone(); + window.defer(cx, |window, cx| { + Self::handle_auth_required( + this, + AuthRequired { + description: Some("GEMINI_API_KEY must be set".to_owned()), + provider_id: Some(language_model::GOOGLE_PROVIDER_ID), + }, + agent, + connection, + window, + cx, + ); + }); return; } - - let editor = cx.new(|cx| { - let mut editor = Editor::new( - EditorMode::Full { - scale_ui_elements_with_buffer_font_size: false, - show_active_line_background: false, - sized_by_content: true, - }, - multibuffer.clone(), - None, - window, - cx, - ); - editor.set_show_gutter(false, cx); - editor.disable_inline_diagnostics(); - editor.disable_expand_excerpt_buttons(cx); - editor.set_show_vertical_scrollbar(false, cx); - editor.set_minimap_visibility(MinimapVisibility::Disabled, window, cx); - editor.set_soft_wrap_mode(SoftWrap::None, cx); - editor.scroll_manager.set_forbid_vertical_scroll(true); - editor.set_show_indent_guides(false, cx); - editor.set_read_only(true); - editor.set_show_breakpoints(false, cx); - editor.set_show_code_actions(false, cx); - editor.set_show_git_diff_gutter(false, cx); - editor.set_expand_all_diff_hunks(cx); - editor.set_text_style_refinement(diff_editor_text_style_refinement(cx)); - editor - }); - let entity_id = multibuffer.entity_id(); - cx.observe_release(&multibuffer, move |this, _, _| { - this.diff_editors.remove(&entity_id); - }) - .detach(); - - self.diff_editors.insert(entity_id, editor); - } - } - - fn entry_diff_multibuffers( - &self, - entry_ix: usize, - cx: &App, - ) -> Option>> { - let entry = self.thread()?.read(cx).entries().get(entry_ix)?; - Some(entry.diffs().map(|diff| diff.multibuffer.clone())) - } - - fn authenticate( - &mut self, - method: acp::AuthMethodId, - window: &mut Window, - cx: &mut Context, - ) { - let ThreadState::Unauthenticated { ref connection } = self.thread_state else { + } else if method.0.as_ref() == "vertex-ai" + && std::env::var("GOOGLE_API_KEY").is_err() + && (std::env::var("GOOGLE_CLOUD_PROJECT").is_err() + || (std::env::var("GOOGLE_CLOUD_PROJECT").is_err())) + { + let this = cx.weak_entity(); + let agent = self.agent.clone(); + let connection = connection.clone(); + + window.defer(cx, |window, cx| { + Self::handle_auth_required( + this, + AuthRequired { + description: Some( + "GOOGLE_API_KEY must be set in the environment to use Vertex AI authentication for Gemini CLI. Please export it and restart Zed." 
+ .to_owned(), + ), + provider_id: None, + }, + agent, + connection, + window, + cx, + ) + }); return; - }; + } - self.last_error.take(); + self.thread_error.take(); + configuration_view.take(); + pending_auth_method.replace(method.clone()); let authenticate = connection.authenticate(method, cx); + cx.notify(); self.auth_task = Some(cx.spawn_in(window, { let project = self.project.clone(); let agent = self.agent.clone(); @@ -755,12 +1096,11 @@ impl AcpThreadView { this.update_in(cx, |this, window, cx| { if let Err(err) = result { - this.last_error = Some(cx.new(|cx| { - Markdown::new(format!("Error: {err}").into(), None, None, cx) - })) + this.handle_thread_error(err, cx); } else { this.thread_state = Self::initial_state( agent, + None, this.workspace.clone(), project.clone(), window, @@ -790,36 +1130,138 @@ impl AcpThreadView { cx.notify(); } + fn rewind(&mut self, message_id: &UserMessageId, cx: &mut Context) { + let Some(thread) = self.thread() else { + return; + }; + thread + .update(cx, |thread, cx| thread.rewind(message_id.clone(), cx)) + .detach_and_log_err(cx); + cx.notify(); + } + fn render_entry( &self, - index: usize, + entry_ix: usize, total_entries: usize, entry: &AgentThreadEntry, window: &mut Window, cx: &Context, ) -> AnyElement { let primary = match &entry { - AgentThreadEntry::UserMessage(message) => div() - .py_4() - .px_2() - .child( - v_flex() - .p_3() - .gap_1p5() - .rounded_lg() - .shadow_md() - .bg(cx.theme().colors().editor_background) - .border_1() - .border_color(cx.theme().colors().border) - .text_xs() - .children(message.content.markdown().map(|md| { - self.render_markdown( - md.clone(), - user_message_markdown_style(window, cx), + AgentThreadEntry::UserMessage(message) => { + let Some(editor) = self + .entry_view_state + .read(cx) + .entry(entry_ix) + .and_then(|entry| entry.message_editor()) + .cloned() + else { + return Empty.into_any_element(); + }; + + let editing = self.editing_message == Some(entry_ix); + let editor_focus = editor.focus_handle(cx).is_focused(window); + let focus_border = cx.theme().colors().border_focused; + + let rules_item = if entry_ix == 0 { + self.render_rules_item(cx) + } else { + None + }; + + v_flex() + .id(("user_message", entry_ix)) + .pt_2() + .pb_4() + .px_2() + .gap_1p5() + .w_full() + .children(rules_item) + .children(message.id.clone().and_then(|message_id| { + message.checkpoint.as_ref()?.show.then(|| { + h_flex() + .gap_2() + .child(Divider::horizontal()) + .child( + Button::new("restore-checkpoint", "Restore Checkpoint") + .icon(IconName::Undo) + .icon_size(IconSize::XSmall) + .icon_position(IconPosition::Start) + .label_size(LabelSize::XSmall) + .icon_color(Color::Muted) + .color(Color::Muted) + .on_click(cx.listener(move |this, _, _window, cx| { + this.rewind(&message_id, cx); + })) + ) + .child(Divider::horizontal()) + }) + })) + .child( + div() + .relative() + .child( + div() + .py_3() + .px_2() + .rounded_lg() + .shadow_md() + .bg(cx.theme().colors().editor_background) + .border_1() + .when(editing && !editor_focus, |this| this.border_dashed()) + .border_color(cx.theme().colors().border) + .map(|this|{ + if editing && editor_focus { + this.border_color(focus_border) + } else if message.id.is_some() { + this.hover(|s| s.border_color(focus_border.opacity(0.8))) + } else { + this + } + }) + .text_xs() + .child(editor.clone().into_any_element()), ) - })), - ) - .into_any(), + .when(editing && editor_focus, |this| + this.child( + h_flex() + .absolute() + .top_neg_3p5() + .right_3() + .gap_1() + .rounded_sm() + 
.border_1() + .border_color(cx.theme().colors().border) + .bg(cx.theme().colors().editor_background) + .overflow_hidden() + .child( + IconButton::new("cancel", IconName::Close) + .icon_color(Color::Error) + .icon_size(IconSize::XSmall) + .on_click(cx.listener(Self::cancel_editing)) + ) + .child( + IconButton::new("regenerate", IconName::Return) + .icon_color(Color::Muted) + .icon_size(IconSize::XSmall) + .tooltip(Tooltip::text( + "Editing will restart the thread from this point." + )) + .on_click(cx.listener({ + let editor = editor.clone(); + move |this, _, window, cx| { + this.regenerate( + entry_ix, &editor, window, cx, + ); + } + })), + ) + ) + ), + ) + .into_any() + } AgentThreadEntry::AssistantMessage(AssistantMessage { chunks }) => { let style = default_markdown_style(false, window, cx); let message_body = v_flex() @@ -836,7 +1278,7 @@ impl AcpThreadView { AssistantMessageChunk::Thought { block } => { block.markdown().map(|md| { self.render_thinking_block( - index, + entry_ix, chunk_ix, md.clone(), window, @@ -852,29 +1294,68 @@ impl AcpThreadView { v_flex() .px_5() .py_1() - .when(index + 1 == total_entries, |this| this.pb_4()) + .when(entry_ix + 1 == total_entries, |this| this.pb_4()) .w_full() .text_ui(cx) .child(message_body) .into_any() } - AgentThreadEntry::ToolCall(tool_call) => div() - .w_full() - .py_1p5() - .px_5() - .child(self.render_tool_call(index, tool_call, window, cx)) - .into_any(), + AgentThreadEntry::ToolCall(tool_call) => { + let has_terminals = tool_call.terminals().next().is_some(); + + div().w_full().py_1p5().px_5().map(|this| { + if has_terminals { + this.children(tool_call.terminals().map(|terminal| { + self.render_terminal_tool_call( + entry_ix, terminal, tool_call, window, cx, + ) + })) + } else { + this.child(self.render_tool_call(entry_ix, tool_call, window, cx)) + } + }) + } + .into_any(), }; let Some(thread) = self.thread() else { return primary; }; + let is_generating = matches!(thread.read(cx).status(), ThreadStatus::Generating); - if index == total_entries - 1 && !is_generating { + let primary = if entry_ix == total_entries - 1 && !is_generating { v_flex() .w_full() .child(primary) .child(self.render_thread_controls(cx)) + .when_some( + self.thread_feedback.comments_editor.clone(), + |this, editor| { + this.child(Self::render_feedback_feedback_editor(editor, window, cx)) + }, + ) + .into_any_element() + } else { + primary + }; + + if let Some(editing_index) = self.editing_message.as_ref() + && *editing_index < entry_ix + { + let backdrop = div() + .id(("backdrop", entry_ix)) + .size_full() + .absolute() + .inset_0() + .bg(cx.theme().colors().panel_background) + .opacity(0.8) + .block_mouse_except_scroll() + .on_click(cx.listener(Self::cancel_editing)); + + div() + .relative() + .child(primary) + .child(backdrop) .into_any_element() } else { primary @@ -889,7 +1370,7 @@ impl AcpThreadView { } fn tool_card_border_color(&self, cx: &Context) -> Hsla { - cx.theme().colors().border.opacity(0.6) + cx.theme().colors().border.opacity(0.8) } fn tool_name_font_size(&self) -> Rems { @@ -999,24 +1480,31 @@ impl AcpThreadView { tool_call: &ToolCall, cx: &Context, ) -> Div { - let tool_icon = Icon::new(match tool_call.kind { - acp::ToolKind::Read => IconName::ToolRead, - acp::ToolKind::Edit => IconName::ToolPencil, - acp::ToolKind::Delete => IconName::ToolDeleteFile, - acp::ToolKind::Move => IconName::ArrowRightLeft, - acp::ToolKind::Search => IconName::ToolSearch, - acp::ToolKind::Execute => IconName::ToolTerminal, - acp::ToolKind::Think => IconName::ToolThink, - 
acp::ToolKind::Fetch => IconName::ToolWeb, - acp::ToolKind::Other => IconName::ToolHammer, - }) - .size(IconSize::Small) - .color(Color::Muted); + let tool_icon = + if tool_call.kind == acp::ToolKind::Edit && tool_call.locations.len() == 1 { + FileIcons::get_icon(&tool_call.locations[0].path, cx) + .map(Icon::from_path) + .unwrap_or(Icon::new(IconName::ToolPencil)) + } else { + Icon::new(match tool_call.kind { + acp::ToolKind::Read => IconName::ToolRead, + acp::ToolKind::Edit => IconName::ToolPencil, + acp::ToolKind::Delete => IconName::ToolDeleteFile, + acp::ToolKind::Move => IconName::ArrowRightLeft, + acp::ToolKind::Search => IconName::ToolSearch, + acp::ToolKind::Execute => IconName::ToolTerminal, + acp::ToolKind::Think => IconName::ToolThink, + acp::ToolKind::Fetch => IconName::ToolWeb, + acp::ToolKind::Other => IconName::ToolHammer, + }) + } + .size(IconSize::Small) + .color(Color::Muted); + + let base_container = h_flex().size_4().justify_center(); if is_collapsible { - h_flex() - .size_4() - .justify_center() + base_container .child( div() .group_hover(&group_name, |s| s.invisible().w_0()) @@ -1047,7 +1535,7 @@ impl AcpThreadView { ), ) } else { - div().child(tool_icon) + base_container.child(tool_icon) } } @@ -1062,14 +1550,10 @@ impl AcpThreadView { let card_header_id = SharedString::from("inner-tool-call-header"); let status_icon = match &tool_call.status { - ToolCallStatus::Allowed { - status: acp::ToolCallStatus::Pending, - } - | ToolCallStatus::WaitingForConfirmation { .. } => None, - ToolCallStatus::Allowed { - status: acp::ToolCallStatus::InProgress, - .. - } => Some( + ToolCallStatus::Pending + | ToolCallStatus::WaitingForConfirmation { .. } + | ToolCallStatus::Completed => None, + ToolCallStatus::InProgress => Some( Icon::new(IconName::ArrowCircle) .color(Color::Accent) .size(IconSize::Small) @@ -1080,36 +1564,27 @@ impl AcpThreadView { ) .into_any(), ), - ToolCallStatus::Allowed { - status: acp::ToolCallStatus::Completed, - .. - } => None, - ToolCallStatus::Rejected - | ToolCallStatus::Canceled - | ToolCallStatus::Allowed { - status: acp::ToolCallStatus::Failed, - .. - } => Some( - Icon::new(IconName::X) + ToolCallStatus::Rejected | ToolCallStatus::Canceled | ToolCallStatus::Failed => Some( + Icon::new(IconName::Close) .color(Color::Error) .size(IconSize::Small) .into_any_element(), ), }; - let needs_confirmation = match &tool_call.status { - ToolCallStatus::WaitingForConfirmation { .. } => true, - _ => tool_call - .content - .iter() - .any(|content| matches!(content, ToolCallContent::Diff { .. })), - }; + let needs_confirmation = matches!( + tool_call.status, + ToolCallStatus::WaitingForConfirmation { .. 
} + ); + let is_edit = + matches!(tool_call.kind, acp::ToolKind::Edit) || tool_call.diffs().next().is_some(); + let use_card_layout = needs_confirmation || is_edit; let is_collapsible = !tool_call.content.is_empty() && !needs_confirmation; - let is_open = !is_collapsible || self.expanded_tool_calls.contains(&tool_call.id); - let gradient_color = cx.theme().colors().panel_background; - let gradient_overlay = { + let is_open = needs_confirmation || self.expanded_tool_calls.contains(&tool_call.id); + + let gradient_overlay = |color: Hsla| { div() .absolute() .top_0() @@ -1118,13 +1593,63 @@ impl AcpThreadView { .h_full() .bg(linear_gradient( 90., - linear_color_stop(gradient_color, 1.), - linear_color_stop(gradient_color.opacity(0.2), 0.), + linear_color_stop(color, 1.), + linear_color_stop(color.opacity(0.2), 0.), )) }; + let gradient_color = if use_card_layout { + self.tool_card_header_bg(cx) + } else { + cx.theme().colors().panel_background + }; + + let tool_output_display = if is_open { + match &tool_call.status { + ToolCallStatus::WaitingForConfirmation { options, .. } => { + v_flex() + .w_full() + .children(tool_call.content.iter().map(|content| { + div() + .child(self.render_tool_call_content( + entry_ix, content, tool_call, window, cx, + )) + .into_any_element() + })) + .child(self.render_permission_buttons( + options, + entry_ix, + tool_call.id.clone(), + tool_call.content.is_empty(), + cx, + )) + .into_any() + } + ToolCallStatus::Pending | ToolCallStatus::InProgress + if is_edit && tool_call.content.is_empty() => + { + self.render_diff_loading(cx).into_any() + } + ToolCallStatus::Pending + | ToolCallStatus::InProgress + | ToolCallStatus::Completed + | ToolCallStatus::Failed + | ToolCallStatus::Canceled => v_flex() + .w_full() + .children(tool_call.content.iter().map(|content| { + div().child( + self.render_tool_call_content(entry_ix, content, tool_call, window, cx), + ) + })) + .into_any(), + ToolCallStatus::Rejected => Empty.into_any(), + } + .into() + } else { + None + }; v_flex() - .when(needs_confirmation, |this| { + .when(use_card_layout, |this| { this.rounded_lg() .border_1() .border_color(self.tool_card_border_color(cx)) @@ -1138,13 +1663,15 @@ impl AcpThreadView { .gap_1() .justify_between() .map(|this| { - if needs_confirmation { + if use_card_layout { this.pl_2() - .pr_1() + .pr_1p5() .py_1() .rounded_t_md() - .border_b_1() - .border_color(self.tool_card_border_color(cx)) + .when(is_open, |this| { + this.border_b_1() + .border_color(self.tool_card_border_color(cx)) + }) .bg(self.tool_card_header_bg(cx)) } else { this.opacity(0.8).hover(|style| style.opacity(1.)) @@ -1155,13 +1682,7 @@ impl AcpThreadView { .group(&card_header_id) .relative() .w_full() - .map(|this| { - if tool_call.locations.len() == 1 { - this.gap_0() - } else { - this.gap_1p5() - } - }) + .min_h_6() .text_size(self.tool_name_font_size()) .child(self.render_tool_call_icon( card_header_id, @@ -1205,6 +1726,7 @@ impl AcpThreadView { .id("non-card-label-container") .w_full() .relative() + .ml_1p5() .overflow_hidden() .child( h_flex() @@ -1214,14 +1736,10 @@ impl AcpThreadView { .overflow_x_scroll() .child(self.render_markdown( tool_call.label.clone(), - default_markdown_style( - needs_confirmation, - window, - cx, - ), + default_markdown_style(false, window, cx), )), ) - .child(gradient_overlay) + .child(gradient_overlay(gradient_color)) .on_click(cx.listener({ let id = tool_call.id.clone(); move |this: &mut Self, _, _, cx: &mut Context| { @@ -1238,106 +1756,135 @@ impl AcpThreadView { ) 
.children(status_icon), ) - .when(is_open, |this| { - this.child( - v_flex() - .text_xs() - .when(is_collapsible, |this| { - this.mt_1() - .border_1() - .border_color(self.tool_card_border_color(cx)) - .bg(cx.theme().colors().editor_background) - .rounded_lg() - }) - .map(|this| { - if is_open { - match &tool_call.status { - ToolCallStatus::WaitingForConfirmation { options, .. } => this - .children(tool_call.content.iter().map(|content| { - div() - .py_1p5() - .child( - self.render_tool_call_content( - content, window, cx, - ), - ) - .into_any_element() - })) - .child(self.render_permission_buttons( - options, - entry_ix, - tool_call.id.clone(), - tool_call.content.is_empty(), - cx, - )), - ToolCallStatus::Allowed { .. } | ToolCallStatus::Canceled => { - this.children(tool_call.content.iter().map(|content| { - div() - .py_1p5() - .child( - self.render_tool_call_content( - content, window, cx, - ), - ) - .into_any_element() - })) - } - ToolCallStatus::Rejected => this, - } - } else { - this - } - }), - ) - }) + .children(tool_output_display) } fn render_tool_call_content( &self, + entry_ix: usize, content: &ToolCallContent, + tool_call: &ToolCall, window: &Window, cx: &Context, ) -> AnyElement { match content { - ToolCallContent::ContentBlock { content } => { - if let Some(md) = content.markdown() { - div() - .p_2() - .child( - self.render_markdown( - md.clone(), - default_markdown_style(false, window, cx), - ), - ) - .into_any_element() + ToolCallContent::ContentBlock(content) => { + if let Some(resource_link) = content.resource_link() { + self.render_resource_link(resource_link, cx) + } else if let Some(markdown) = content.markdown() { + self.render_markdown_output(markdown.clone(), tool_call.id.clone(), window, cx) } else { Empty.into_any_element() } } - ToolCallContent::Diff { - diff: Diff { multibuffer, .. }, - .. 
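// The expandable tool-call and terminal cards in this view track their open state as set
// membership keyed by tool-call id. A std-only sketch of that toggle pattern;
// `ToolCallId` here is a hypothetical stand-in for the acp id type.
use std::collections::HashSet;

#[derive(Clone, PartialEq, Eq, Hash)]
struct ToolCallId(String);

#[derive(Default)]
struct ExpansionState {
    expanded: HashSet<ToolCallId>,
}

impl ExpansionState {
    /// Flips the expansion state for `id`, returning whether it is now expanded.
    fn toggle(&mut self, id: &ToolCallId) -> bool {
        if self.expanded.remove(id) {
            false
        } else {
            self.expanded.insert(id.clone());
            true
        }
    }

    fn is_expanded(&self, id: &ToolCallId) -> bool {
        self.expanded.contains(id)
    }
}
// Toggling the same id twice restores the original state, which is the behavior the
// disclosure click handlers in this view rely on.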
- } => self.render_diff_editor(multibuffer), + ToolCallContent::Diff(diff) => self.render_diff_editor(entry_ix, diff, tool_call, cx), + ToolCallContent::Terminal(terminal) => { + self.render_terminal_tool_call(entry_ix, terminal, tool_call, window, cx) + } } } - fn render_permission_buttons( + fn render_markdown_output( &self, - options: &[acp::PermissionOption], - entry_ix: usize, + markdown: Entity, + tool_call_id: acp::ToolCallId, + window: &Window, + cx: &Context, + ) -> AnyElement { + let button_id = SharedString::from(format!("tool_output-{:?}", tool_call_id)); + + v_flex() + .mt_1p5() + .ml(px(7.)) + .px_3p5() + .gap_2() + .border_l_1() + .border_color(self.tool_card_border_color(cx)) + .text_sm() + .text_color(cx.theme().colors().text_muted) + .child(self.render_markdown(markdown, default_markdown_style(false, window, cx))) + .child( + Button::new(button_id, "Collapse Output") + .full_width() + .style(ButtonStyle::Outlined) + .label_size(LabelSize::Small) + .icon(IconName::ChevronUp) + .icon_color(Color::Muted) + .icon_position(IconPosition::Start) + .on_click(cx.listener({ + move |this: &mut Self, _, _, cx: &mut Context| { + this.expanded_tool_calls.remove(&tool_call_id); + cx.notify(); + } + })), + ) + .into_any_element() + } + + fn render_resource_link( + &self, + resource_link: &acp::ResourceLink, + cx: &Context, + ) -> AnyElement { + let uri: SharedString = resource_link.uri.clone().into(); + + let label: SharedString = if let Some(path) = resource_link.uri.strip_prefix("file://") { + path.to_string().into() + } else { + uri.clone() + }; + + let button_id = SharedString::from(format!("item-{}", uri)); + + div() + .ml(px(7.)) + .pl_2p5() + .border_l_1() + .border_color(self.tool_card_border_color(cx)) + .overflow_hidden() + .child( + Button::new(button_id, label) + .label_size(LabelSize::Small) + .color(Color::Muted) + .icon(IconName::ArrowUpRight) + .icon_size(IconSize::XSmall) + .icon_color(Color::Muted) + .truncate(true) + .on_click(cx.listener({ + let workspace = self.workspace.clone(); + move |_, _, window, cx: &mut Context| { + Self::open_link(uri.clone(), &workspace, window, cx); + } + })), + ) + .into_any_element() + } + + fn render_permission_buttons( + &self, + options: &[acp::PermissionOption], + entry_ix: usize, tool_call_id: acp::ToolCallId, empty_content: bool, cx: &Context, ) -> Div { h_flex() - .p_1p5() + .py_1() + .pl_2() + .pr_1() .gap_1() - .justify_end() + .justify_between() + .flex_wrap() .when(!empty_content, |this| { this.border_t_1() .border_color(self.tool_card_border_color(cx)) }) - .children(options.iter().map(|option| { + .child( + div() + .min_w(rems_from_px(145.)) + .child(LoadingLabel::new("Waiting for Confirmation").size(LabelSize::Small)), + ) + .child(h_flex().gap_0p5().children(options.iter().map(|option| { let option_id = SharedString::from(option.id.0.clone()); Button::new((option_id, entry_ix), option.name.clone()) .map(|this| match option.kind { @@ -1348,10 +1895,10 @@ impl AcpThreadView { this.icon(IconName::CheckDouble).icon_color(Color::Success) } acp::PermissionOptionKind::RejectOnce => { - this.icon(IconName::X).icon_color(Color::Error) + this.icon(IconName::Close).icon_color(Color::Error) } acp::PermissionOptionKind::RejectAlways => { - this.icon(IconName::X).icon_color(Color::Error) + this.icon(IconName::Close).icon_color(Color::Error) } }) .icon_position(IconPosition::Start) @@ -1370,15 +1917,72 @@ impl AcpThreadView { ); } })) - })) + }))) + } + + fn render_diff_loading(&self, cx: &Context) -> AnyElement { + let bar = |n: u64, 
width_class: &str| { + let bg_color = cx.theme().colors().element_active; + let base = h_flex().h_1().rounded_full(); + + let modified = match width_class { + "w_4_5" => base.w_3_4(), + "w_1_4" => base.w_1_4(), + "w_2_4" => base.w_2_4(), + "w_3_5" => base.w_3_5(), + "w_2_5" => base.w_2_5(), + _ => base.w_1_2(), + }; + + modified.with_animation( + ElementId::Integer(n), + Animation::new(Duration::from_secs(2)).repeat(), + move |tab, delta| { + let delta = (delta - 0.15 * n as f32) / 0.7; + let delta = 1.0 - (0.5 - delta).abs() * 2.; + let delta = ease_in_out(delta.clamp(0., 1.)); + let delta = 0.1 + 0.9 * delta; + + tab.bg(bg_color.opacity(delta)) + }, + ) + }; + + v_flex() + .p_3() + .gap_1() + .rounded_b_md() + .bg(cx.theme().colors().editor_background) + .child(bar(0, "w_4_5")) + .child(bar(1, "w_1_4")) + .child(bar(2, "w_2_4")) + .child(bar(3, "w_3_5")) + .child(bar(4, "w_2_5")) + .into_any_element() } - fn render_diff_editor(&self, multibuffer: &Entity) -> AnyElement { + fn render_diff_editor( + &self, + entry_ix: usize, + diff: &Entity, + tool_call: &ToolCall, + cx: &Context, + ) -> AnyElement { + let tool_progress = matches!( + &tool_call.status, + ToolCallStatus::InProgress | ToolCallStatus::Pending + ); + v_flex() .h_full() .child( - if let Some(editor) = self.diff_editors.get(&multibuffer.entity_id()) { - editor.clone().into_any_element() + if let Some(entry) = self.entry_view_state.read(cx).entry(entry_ix) + && let Some(editor) = entry.editor_for_diff(diff) + && diff.read(cx).has_revealed_range(cx) + { + editor.into_any_element() + } else if tool_progress { + self.render_diff_loading(cx) } else { Empty.into_any() }, @@ -1386,6 +1990,254 @@ impl AcpThreadView { .into_any() } + fn render_terminal_tool_call( + &self, + entry_ix: usize, + terminal: &Entity, + tool_call: &ToolCall, + window: &Window, + cx: &Context, + ) -> AnyElement { + let terminal_data = terminal.read(cx); + let working_dir = terminal_data.working_dir(); + let command = terminal_data.command(); + let started_at = terminal_data.started_at(); + + let tool_failed = matches!( + &tool_call.status, + ToolCallStatus::Rejected | ToolCallStatus::Canceled | ToolCallStatus::Failed + ); + + let output = terminal_data.output(); + let command_finished = output.is_some(); + let truncated_output = output.is_some_and(|output| output.was_content_truncated); + let output_line_count = output.map(|output| output.content_line_count).unwrap_or(0); + + let command_failed = command_finished + && output.is_some_and(|o| o.exit_status.is_none_or(|status| !status.success())); + + let time_elapsed = if let Some(output) = output { + output.ended_at.duration_since(started_at) + } else { + started_at.elapsed() + }; + + let header_bg = cx + .theme() + .colors() + .element_background + .blend(cx.theme().colors().editor_foreground.opacity(0.025)); + let border_color = cx.theme().colors().border.opacity(0.6); + + let working_dir = working_dir + .as_ref() + .map(|path| format!("{}", path.display())) + .unwrap_or_else(|| "current directory".to_string()); + + let is_expanded = self.expanded_tool_calls.contains(&tool_call.id); + + let header = h_flex() + .id(SharedString::from(format!( + "terminal-tool-header-{}", + terminal.entity_id() + ))) + .flex_none() + .gap_1() + .justify_between() + .rounded_t_md() + .child( + div() + .id(("command-target-path", terminal.entity_id())) + .w_full() + .max_w_full() + .overflow_x_scroll() + .child( + Label::new(working_dir) + .buffer_font(cx) + .size(LabelSize::XSmall) + .color(Color::Muted), + ), + ) + 
.when(!command_finished, |header| { + header + .gap_1p5() + .child( + Button::new( + SharedString::from(format!("stop-terminal-{}", terminal.entity_id())), + "Stop", + ) + .icon(IconName::Stop) + .icon_position(IconPosition::Start) + .icon_size(IconSize::Small) + .icon_color(Color::Error) + .label_size(LabelSize::Small) + .tooltip(move |window, cx| { + Tooltip::with_meta( + "Stop This Command", + None, + "Also possible by placing your cursor inside the terminal and using regular terminal bindings.", + window, + cx, + ) + }) + .on_click({ + let terminal = terminal.clone(); + cx.listener(move |_this, _event, _window, cx| { + let inner_terminal = terminal.read(cx).inner().clone(); + inner_terminal.update(cx, |inner_terminal, _cx| { + inner_terminal.kill_active_task(); + }); + }) + }), + ) + .child(Divider::vertical()) + .child( + Icon::new(IconName::ArrowCircle) + .size(IconSize::XSmall) + .color(Color::Info) + .with_animation( + "arrow-circle", + Animation::new(Duration::from_secs(2)).repeat(), + |icon, delta| { + icon.transform(Transformation::rotate(percentage(delta))) + }, + ), + ) + }) + .when(tool_failed || command_failed, |header| { + header.child( + div() + .id(("terminal-tool-error-code-indicator", terminal.entity_id())) + .child( + Icon::new(IconName::Close) + .size(IconSize::Small) + .color(Color::Error), + ) + .when_some(output.and_then(|o| o.exit_status), |this, status| { + this.tooltip(Tooltip::text(format!( + "Exited with code {}", + status.code().unwrap_or(-1), + ))) + }), + ) + }) + .when(truncated_output, |header| { + let tooltip = if let Some(output) = output { + if output_line_count + 10 > terminal::MAX_SCROLL_HISTORY_LINES { + "Output exceeded terminal max lines and was \ + truncated, the model received the first 16 KB." + .to_string() + } else { + format!( + "Output is {} long—to avoid unexpected token usage, \ + only 16 KB was sent back to the model.", + format_file_size(output.original_content_len as u64, true), + ) + } + } else { + "Output was truncated".to_string() + }; + + header.child( + h_flex() + .id(("terminal-tool-truncated-label", terminal.entity_id())) + .gap_1() + .child( + Icon::new(IconName::Info) + .size(IconSize::XSmall) + .color(Color::Ignored), + ) + .child( + Label::new("Truncated") + .color(Color::Muted) + .size(LabelSize::XSmall), + ) + .tooltip(Tooltip::text(tooltip)), + ) + }) + .when(time_elapsed > Duration::from_secs(10), |header| { + header.child( + Label::new(format!("({})", duration_alt_display(time_elapsed))) + .buffer_font(cx) + .color(Color::Muted) + .size(LabelSize::XSmall), + ) + }) + .child( + Disclosure::new( + SharedString::from(format!( + "terminal-tool-disclosure-{}", + terminal.entity_id() + )), + is_expanded, + ) + .opened_icon(IconName::ChevronUp) + .closed_icon(IconName::ChevronDown) + .on_click(cx.listener({ + let id = tool_call.id.clone(); + move |this, _event, _window, _cx| { + if is_expanded { + this.expanded_tool_calls.remove(&id); + } else { + this.expanded_tool_calls.insert(id.clone()); + } + } + })), + ); + + let terminal_view = self + .entry_view_state + .read(cx) + .entry(entry_ix) + .and_then(|entry| entry.terminal(terminal)); + let show_output = is_expanded && terminal_view.is_some(); + + v_flex() + .mb_2() + .border_1() + .when(tool_failed || command_failed, |card| card.border_dashed()) + .border_color(border_color) + .rounded_lg() + .overflow_hidden() + .child( + v_flex() + .py_1p5() + .pl_2() + .pr_1p5() + .gap_0p5() + .bg(header_bg) + .text_xs() + .child(header) + .child( + MarkdownElement::new( + 
command.clone(), + terminal_command_markdown_style(window, cx), + ) + .code_block_renderer( + markdown::CodeBlockRenderer::Default { + copy_button: false, + copy_button_on_hover: true, + border: false, + }, + ), + ), + ) + .when(show_output, |this| { + this.child( + div() + .pt_2() + .border_t_1() + .when(tool_failed || command_failed, |card| card.border_dashed()) + .border_color(border_color) + .bg(cx.theme().colors().editor_background) + .rounded_b_md() + .text_ui_sm(cx) + .children(terminal_view.clone()), + ) + }) + .into_any() + } + fn render_agent_logo(&self) -> AnyElement { Icon::new(self.agent.logo()) .color(Color::Muted) @@ -1404,98 +2256,384 @@ impl AcpThreadView { .justify_center() .child(div().opacity(0.3).child(logo)) .child( - h_flex().absolute().right_1().bottom_0().child( - Icon::new(IconName::XCircle) - .color(Color::Error) - .size(IconSize::Small), - ), + h_flex() + .absolute() + .right_1() + .bottom_0() + .child(Icon::new(IconName::XCircleFilled).color(Color::Error)), ) .into_any_element() } - fn render_empty_state(&self, cx: &App) -> AnyElement { - let loading = matches!(&self.thread_state, ThreadState::Loading { .. }); + fn render_rules_item(&self, cx: &Context) -> Option { + let project_context = self + .as_native_thread(cx)? + .read(cx) + .project_context() + .read(cx); - v_flex() - .size_full() - .items_center() - .justify_center() - .child(if loading { - h_flex() - .justify_center() - .child(self.render_agent_logo()) - .with_animation( - "pulsating_icon", - Animation::new(Duration::from_secs(2)) - .repeat() - .with_easing(pulsating_between(0.4, 1.0)), - |icon, delta| icon.opacity(delta), + let user_rules_text = if project_context.user_rules.is_empty() { + None + } else if project_context.user_rules.len() == 1 { + let user_rules = &project_context.user_rules[0]; + + match user_rules.title.as_ref() { + Some(title) => Some(format!("Using \"{title}\" user rule")), + None => Some("Using user rule".into()), + } + } else { + Some(format!( + "Using {} user rules", + project_context.user_rules.len() + )) + }; + + let first_user_rules_id = project_context + .user_rules + .first() + .map(|user_rules| user_rules.uuid.0); + + let rules_files = project_context + .worktrees + .iter() + .filter_map(|worktree| worktree.rules_file.as_ref()) + .collect::>(); + + let rules_file_text = match rules_files.as_slice() { + &[] => None, + &[rules_file] => Some(format!( + "Using project {:?} file", + rules_file.path_in_worktree + )), + rules_files => Some(format!("Using {} project rules files", rules_files.len())), + }; + + if user_rules_text.is_none() && rules_file_text.is_none() { + return None; + } + + Some( + v_flex() + .px_2p5() + .gap_1() + .when_some(user_rules_text, |parent, user_rules_text| { + parent.child( + h_flex() + .group("user-rules") + .id("user-rules") + .w_full() + .child( + Icon::new(IconName::Reader) + .size(IconSize::XSmall) + .color(Color::Disabled), + ) + .child( + Label::new(user_rules_text) + .size(LabelSize::XSmall) + .color(Color::Muted) + .truncate() + .buffer_font(cx) + .ml_1p5() + .mr_0p5(), + ) + .child( + IconButton::new("open-prompt-library", IconName::ArrowUpRight) + .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::XSmall) + .icon_color(Color::Ignored) + .visible_on_hover("user-rules") + // TODO: Figure out a way to pass focus handle here so we can display the `OpenRulesLibrary` keybinding + .tooltip(Tooltip::text("View User Rules")), + ) + .on_click(move |_event, window, cx| { + window.dispatch_action( + Box::new(OpenRulesLibrary { + 
prompt_to_select: first_user_rules_id, + }), + cx, + ) + }), ) - .into_any() - } else { - self.render_agent_logo().into_any_element() - }) - .child(h_flex().mt_4().mb_1().justify_center().child(if loading { - div() - .child(LoadingLabel::new("").size(LabelSize::Large)) - .into_any_element() - } else { - Headline::new(self.agent.empty_state_headline()) - .size(HeadlineSize::Medium) - .into_any_element() - })) - .child( - div() - .max_w_1_2() - .text_sm() - .text_center() - .map(|this| { - if loading { - this.invisible() - } else { - this.text_color(cx.theme().colors().text_muted) - } - }) - .child(self.agent.empty_state_message()), - ) - .into_any() + }) + .when_some(rules_file_text, |parent, rules_file_text| { + parent.child( + h_flex() + .group("project-rules") + .id("project-rules") + .w_full() + .child( + Icon::new(IconName::Reader) + .size(IconSize::XSmall) + .color(Color::Disabled), + ) + .child( + Label::new(rules_file_text) + .size(LabelSize::XSmall) + .color(Color::Muted) + .buffer_font(cx) + .ml_1p5() + .mr_0p5(), + ) + .child( + IconButton::new("open-rule", IconName::ArrowUpRight) + .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::XSmall) + .icon_color(Color::Ignored) + .visible_on_hover("project-rules") + .tooltip(Tooltip::text("View Project Rules")), + ) + .on_click(cx.listener(Self::handle_open_rules)), + ) + }) + .into_any(), + ) } - fn render_pending_auth_state(&self) -> AnyElement { - v_flex() - .items_center() - .justify_center() - .child(self.render_error_agent_logo()) - .child( - h_flex() - .mt_4() - .mb_1() - .justify_center() - .child(Headline::new("Not Authenticated").size(HeadlineSize::Medium)), - ) - .into_any() + fn render_empty_state_section_header( + &self, + label: impl Into, + action_slot: Option, + cx: &mut Context, + ) -> impl IntoElement { + div().pl_1().pr_1p5().child( + h_flex() + .mt_2() + .pl_1p5() + .pb_1() + .w_full() + .justify_between() + .border_b_1() + .border_color(cx.theme().colors().border_variant) + .child( + Label::new(label.into()) + .size(LabelSize::Small) + .color(Color::Muted), + ) + .children(action_slot), + ) } - fn render_server_exited(&self, status: ExitStatus, _cx: &Context) -> AnyElement { + fn render_empty_state(&self, window: &mut Window, cx: &mut Context) -> AnyElement { + let loading = matches!(&self.thread_state, ThreadState::Loading { .. 
}); + let render_history = self + .agent + .clone() + .downcast::() + .is_some() + && self + .history_store + .update(cx, |history_store, cx| !history_store.is_empty(cx)); + v_flex() - .items_center() - .justify_center() - .child(self.render_error_agent_logo()) - .child( - v_flex() - .mt_4() - .mb_2() - .gap_0p5() - .text_center() + .size_full() + .when(!render_history, |this| { + this.child( + v_flex() + .size_full() + .items_center() + .justify_center() + .child(if loading { + h_flex() + .justify_center() + .child(self.render_agent_logo()) + .with_animation( + "pulsating_icon", + Animation::new(Duration::from_secs(2)) + .repeat() + .with_easing(pulsating_between(0.4, 1.0)), + |icon, delta| icon.opacity(delta), + ) + .into_any() + } else { + self.render_agent_logo().into_any_element() + }) + .child(h_flex().mt_4().mb_2().justify_center().child(if loading { + div() + .child(LoadingLabel::new("").size(LabelSize::Large)) + .into_any_element() + } else { + Headline::new(self.agent.empty_state_headline()) + .size(HeadlineSize::Medium) + .into_any_element() + })), + ) + }) + .when(render_history, |this| { + let recent_history = self + .history_store + .update(cx, |history_store, cx| history_store.recent_entries(3, cx)); + this.justify_end().child( + v_flex() + .child( + self.render_empty_state_section_header( + "Recent", + Some( + Button::new("view-history", "View All") + .style(ButtonStyle::Subtle) + .label_size(LabelSize::Small) + .key_binding( + KeyBinding::for_action_in( + &OpenHistory, + &self.focus_handle(cx), + window, + cx, + ) + .map(|kb| kb.size(rems_from_px(12.))), + ) + .on_click(move |_event, window, cx| { + window.dispatch_action(OpenHistory.boxed_clone(), cx); + }) + .into_any_element(), + ), + cx, + ), + ) + .child( + v_flex().p_1().pr_1p5().gap_1().children( + recent_history + .into_iter() + .enumerate() + .map(|(index, entry)| { + // TODO: Add keyboard navigation. 
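+ // Track which recent-history row is hovered so it can be highlighted;
+ // the stored index is cleared again when the pointer leaves that row.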
+ let is_hovered = + self.hovered_recent_history_item == Some(index); + crate::acp::thread_history::AcpHistoryEntryElement::new( + entry, + cx.entity().downgrade(), + ) + .hovered(is_hovered) + .on_hover(cx.listener( + move |this, is_hovered, _window, cx| { + if *is_hovered { + this.hovered_recent_history_item = Some(index); + } else if this.hovered_recent_history_item + == Some(index) + { + this.hovered_recent_history_item = None; + } + cx.notify(); + }, + )) + .into_any_element() + }), + ), + ), + ) + }) + .into_any() + } + + fn render_auth_required_state( + &self, + connection: &Rc, + description: Option<&Entity>, + configuration_view: Option<&AnyView>, + pending_auth_method: Option<&acp::AuthMethodId>, + window: &mut Window, + cx: &Context, + ) -> Div { + v_flex() + .p_2() + .gap_2() + .flex_1() + .items_center() + .justify_center() + .child( + v_flex() .items_center() - .child(Headline::new("Server exited unexpectedly").size(HeadlineSize::Medium)) + .justify_center() + .child(self.render_error_agent_logo()) .child( - Label::new(format!("Exit status: {}", status.code().unwrap_or(-127))) - .size(LabelSize::Small) - .color(Color::Muted), - ), + h_flex().mt_4().mb_1().justify_center().child( + Headline::new("Authentication Required").size(HeadlineSize::Medium), + ), + ) + .into_any(), + ) + .children(description.map(|desc| { + div().text_ui(cx).text_center().child( + self.render_markdown(desc.clone(), default_markdown_style(false, window, cx)), + ) + })) + .children( + configuration_view + .cloned() + .map(|view| div().px_4().w_full().max_w_128().child(view)), + ) + .when( + configuration_view.is_none() + && description.is_none() + && pending_auth_method.is_none(), + |el| { + el.child( + div() + .text_ui(cx) + .text_center() + .px_4() + .w_full() + .max_w_128() + .child(Label::new("Authentication required")), + ) + }, + ) + .when_some(pending_auth_method, |el, _| { + let spinner_icon = div() + .px_0p5() + .id("generating") + .tooltip(Tooltip::text("Generating Changes…")) + .child( + Icon::new(IconName::ArrowCircle) + .size(IconSize::Small) + .with_animation( + "arrow-circle", + Animation::new(Duration::from_secs(2)).repeat(), + |icon, delta| { + icon.transform(Transformation::rotate(percentage(delta))) + }, + ) + .into_any_element(), + ) + .into_any(); + el.child( + h_flex() + .text_ui(cx) + .text_center() + .justify_center() + .gap_2() + .px_4() + .w_full() + .max_w_128() + .child(Label::new("Authenticating...")) + .child(spinner_icon), + ) + }) + .child( + h_flex() + .mt_1p5() + .gap_1() + .flex_wrap() + .justify_center() + .children(connection.auth_methods().iter().enumerate().rev().map( + |(ix, method)| { + Button::new( + SharedString::from(method.id.0.clone()), + method.name.clone(), + ) + .style(ButtonStyle::Outlined) + .when(ix == 0, |el| { + el.style(ButtonStyle::Tinted(ui::TintColor::Accent)) + }) + .size(ButtonSize::Medium) + .label_size(LabelSize::Small) + .on_click({ + let method_id = method.id.clone(); + cx.listener(move |this, _, window, cx| { + this.authenticate(method_id.clone(), window, cx) + }) + }) + }, + )), ) - .into_any_element() } fn render_load_error(&self, e: &LoadError, cx: &Context) -> AnyElement { @@ -1526,39 +2664,104 @@ impl AcpThreadView { { let upgrade_message = upgrade_message.clone(); let upgrade_command = upgrade_command.clone(); - container = container.child(Button::new("upgrade", upgrade_message).on_click( - cx.listener(move |this, _, window, cx| { - this.workspace - .update(cx, |workspace, cx| { - let project = workspace.project().read(cx); - let cwd 
= project.first_project_directory(cx); - let shell = project.terminal_settings(&cwd, cx).shell.clone(); - let spawn_in_terminal = task::SpawnInTerminal { - id: task::TaskId("install".to_string()), - full_label: upgrade_command.clone(), - label: upgrade_command.clone(), - command: Some(upgrade_command.clone()), - args: Vec::new(), - command_label: upgrade_command.clone(), - cwd, - env: Default::default(), - use_new_terminal: true, - allow_concurrent_runs: true, - reveal: Default::default(), - reveal_target: Default::default(), - hide: Default::default(), - shell, - show_summary: true, - show_command: true, - show_rerun: false, - }; - workspace - .spawn_in_terminal(spawn_in_terminal, window, cx) - .detach(); + container = container.child( + Button::new("upgrade", upgrade_message) + .tooltip(Tooltip::text(upgrade_command.clone())) + .on_click(cx.listener(move |this, _, window, cx| { + let task = this + .workspace + .update(cx, |workspace, cx| { + let project = workspace.project().read(cx); + let cwd = project.first_project_directory(cx); + let shell = project.terminal_settings(&cwd, cx).shell.clone(); + let spawn_in_terminal = task::SpawnInTerminal { + id: task::TaskId("upgrade".to_string()), + full_label: upgrade_command.clone(), + label: upgrade_command.clone(), + command: Some(upgrade_command.clone()), + args: Vec::new(), + command_label: upgrade_command.clone(), + cwd, + env: Default::default(), + use_new_terminal: true, + allow_concurrent_runs: true, + reveal: Default::default(), + reveal_target: Default::default(), + hide: Default::default(), + shell, + show_summary: true, + show_command: true, + show_rerun: false, + }; + workspace.spawn_in_terminal(spawn_in_terminal, window, cx) + }) + .ok(); + let Some(task) = task else { return }; + cx.spawn_in(window, async move |this, cx| { + if let Some(Ok(_)) = task.await { + this.update_in(cx, |this, window, cx| { + this.reset(window, cx); + }) + .ok(); + } }) - .ok(); - }), - )); + .detach() + })), + ); + } else if let LoadError::NotInstalled { + install_message, + install_command, + .. 
+ } = e + { + let install_message = install_message.clone(); + let install_command = install_command.clone(); + container = container.child( + Button::new("install", install_message) + .style(ButtonStyle::Tinted(ui::TintColor::Accent)) + .size(ButtonSize::Medium) + .tooltip(Tooltip::text(install_command.clone())) + .on_click(cx.listener(move |this, _, window, cx| { + let task = this + .workspace + .update(cx, |workspace, cx| { + let project = workspace.project().read(cx); + let cwd = project.first_project_directory(cx); + let shell = project.terminal_settings(&cwd, cx).shell.clone(); + let spawn_in_terminal = task::SpawnInTerminal { + id: task::TaskId("install".to_string()), + full_label: install_command.clone(), + label: install_command.clone(), + command: Some(install_command.clone()), + args: Vec::new(), + command_label: install_command.clone(), + cwd, + env: Default::default(), + use_new_terminal: true, + allow_concurrent_runs: true, + reveal: Default::default(), + reveal_target: Default::default(), + hide: Default::default(), + shell, + show_summary: true, + show_command: true, + show_rerun: false, + }; + workspace.spawn_in_terminal(spawn_in_terminal, window, cx) + }) + .ok(); + let Some(task) = task else { return }; + cx.spawn_in(window, async move |this, cx| { + if let Some(Ok(_)) = task.await { + this.update_in(cx, |this, window, cx| { + this.reset(window, cx); + }) + .ok(); + } + }) + .detach() + })), + ); } container.into_any() @@ -1605,24 +2808,25 @@ impl AcpThreadView { parent.child(self.render_plan_entries(plan, window, cx)) }) }) - .when(!changed_buffers.is_empty(), |this| { + .when(!plan.is_empty() && !changed_buffers.is_empty(), |this| { this.child(Divider::horizontal().color(DividerColor::Border)) - .child(self.render_edits_summary( + }) + .when(!changed_buffers.is_empty(), |this| { + this.child(self.render_edits_summary( + &changed_buffers, + self.edits_expanded, + pending_edits, + window, + cx, + )) + .when(self.edits_expanded, |parent| { + parent.child(self.render_edited_files( action_log, &changed_buffers, - self.edits_expanded, pending_edits, - window, cx, )) - .when(self.edits_expanded, |parent| { - parent.child(self.render_edited_files( - action_log, - &changed_buffers, - pending_edits, - cx, - )) - }) + }) }) .into_any() .into() @@ -1760,7 +2964,6 @@ impl AcpThreadView { fn render_edits_summary( &self, - action_log: &Entity, changed_buffers: &BTreeMap, Entity>, expanded: bool, pending_edits: bool, @@ -1871,14 +3074,9 @@ impl AcpThreadView { ) .map(|kb| kb.size(rems_from_px(10.))), ) - .on_click({ - let action_log = action_log.clone(); - cx.listener(move |_, _, _, cx| { - action_log.update(cx, |action_log, cx| { - action_log.reject_all_edits(cx).detach(); - }) - }) - }), + .on_click(cx.listener(move |this, _, window, cx| { + this.reject_all(&RejectAll, window, cx); + })), ) .child( Button::new("keep-all-changes", "Keep All") @@ -1891,14 +3089,9 @@ impl AcpThreadView { KeyBinding::for_action_in(&KeepAll, &focus_handle, window, cx) .map(|kb| kb.size(rems_from_px(10.))), ) - .on_click({ - let action_log = action_log.clone(); - cx.listener(move |_, _, _, cx| { - action_log.update(cx, |action_log, cx| { - action_log.keep_all_edits(cx); - }) - }) - }), + .on_click(cx.listener(move |this, _, window, cx| { + this.keep_all(&KeepAll, window, cx); + })), ), ) } @@ -1912,7 +3105,7 @@ impl AcpThreadView { ) -> Div { let editor_bg_color = cx.theme().colors().editor_background; - v_flex().children(changed_buffers.into_iter().enumerate().flat_map( + 
v_flex().children(changed_buffers.iter().enumerate().flat_map( |(index, (buffer, _diff))| { let file = buffer.read(cx).file()?; let path = file.path(); @@ -1938,7 +3131,7 @@ impl AcpThreadView { .buffer_font(cx) }); - let file_icon = FileIcons::get_icon(&path, cx) + let file_icon = FileIcons::get_icon(path, cx) .map(Icon::from_path) .map(|icon| icon.color(Color::Muted).size(IconSize::Small)) .unwrap_or_else(|| { @@ -2064,6 +3257,17 @@ impl AcpThreadView { v_flex() .on_action(cx.listener(Self::expand_message_editor)) + .on_action(cx.listener(|this, _: &ToggleProfileSelector, window, cx| { + if let Some(profile_selector) = this.profile_selector.as_ref() { + profile_selector.read(cx).menu_handle().toggle(window, cx); + } + })) + .on_action(cx.listener(|this, _: &ToggleModelSelector, window, cx| { + if let Some(model_selector) = this.model_selector.as_ref() { + model_selector + .update(cx, |model_selector, cx| model_selector.toggle(window, cx)); + } + })) .p_2() .gap_2() .border_t_1() @@ -2078,34 +3282,7 @@ impl AcpThreadView { .size_full() .pt_1() .pr_2p5() - .child(div().flex_1().child({ - let settings = ThemeSettings::get_global(cx); - let font_size = TextSize::Small - .rems(cx) - .to_pixels(settings.agent_font_size(cx)); - let line_height = settings.buffer_line_height.value() * font_size; - - let text_style = TextStyle { - color: cx.theme().colors().text, - font_family: settings.buffer_font.family.clone(), - font_fallbacks: settings.buffer_font.fallbacks.clone(), - font_features: settings.buffer_font.features.clone(), - font_size: font_size.into(), - line_height: line_height.into(), - ..Default::default() - }; - - EditorElement::new( - &self.message_editor, - EditorStyle { - background: editor_bg_color, - local_player: cx.theme().players().local(), - text: text_style, - syntax: cx.theme().syntax().clone(), - ..Default::default() - }, - ) - })) + .child(self.message_editor.clone()) .child( h_flex() .absolute() @@ -2115,10 +3292,9 @@ impl AcpThreadView { .hover(|this| this.opacity(1.0)) .child( IconButton::new("toggle-height", expand_icon) - .icon_size(IconSize::XSmall) + .icon_size(IconSize::Small) .icon_color(Color::Muted) .tooltip({ - let focus_handle = focus_handle.clone(); move |window, cx| { Tooltip::for_action_in( expand_tooltip, @@ -2138,40 +3314,199 @@ impl AcpThreadView { .child( h_flex() .flex_none() + .flex_wrap() .justify_between() - .child(self.render_follow_toggle(cx)) - .child(self.render_send_button(cx)), + .child( + h_flex() + .child(self.render_follow_toggle(cx)) + .children(self.render_burn_mode_toggle(cx)), + ) + .child( + h_flex() + .gap_1() + .children(self.render_token_usage(cx)) + .children(self.profile_selector.clone()) + .children(self.model_selector.clone()) + .child(self.render_send_button(cx)), + ), ) .into_any() } - fn render_send_button(&self, cx: &mut Context) -> AnyElement { - if self.thread().map_or(true, |thread| { - thread.read(cx).status() == ThreadStatus::Idle - }) { - let is_editor_empty = self.message_editor.read(cx).is_empty(cx); - IconButton::new("send-message", IconName::Send) - .icon_color(Color::Accent) - .style(ButtonStyle::Filled) - .disabled(self.thread().is_none() || is_editor_empty) - .when(!is_editor_empty, |button| { - button.tooltip(move |window, cx| Tooltip::for_action("Send", &Chat, window, cx)) - }) - .when(is_editor_empty, |button| { - button.tooltip(Tooltip::text("Type a message to submit")) - }) - .on_click(cx.listener(|this, _, window, cx| { - this.chat(&Chat, window, cx); - })) - .into_any_element() + pub(crate) fn 
as_native_connection(
+        &self,
+        cx: &App,
+    ) -> Option<Rc<agent2::NativeAgentConnection>> {
+        let acp_thread = self.thread()?.read(cx);
+        acp_thread.connection().clone().downcast()
+    }
+
+    pub(crate) fn as_native_thread(&self, cx: &App) -> Option<Entity<agent2::Thread>> {
+        let acp_thread = self.thread()?.read(cx);
+        self.as_native_connection(cx)?
+            .thread(acp_thread.session_id(), cx)
+    }
+
+    fn is_using_zed_ai_models(&self, cx: &App) -> bool {
+        self.as_native_thread(cx)
+            .and_then(|thread| thread.read(cx).model())
+            .is_some_and(|model| model.provider_id() == language_model::ZED_CLOUD_PROVIDER_ID)
+    }
+
+    fn render_token_usage(&self, cx: &mut Context<Self>) -> Option<Div>
{ + let thread = self.thread()?.read(cx); + let usage = thread.token_usage()?; + let is_generating = thread.status() != ThreadStatus::Idle; + + let used = crate::text_thread_editor::humanize_token_count(usage.used_tokens); + let max = crate::text_thread_editor::humanize_token_count(usage.max_tokens); + + Some( + h_flex() + .flex_shrink_0() + .gap_0p5() + .mr_1p5() + .child( + Label::new(used) + .size(LabelSize::Small) + .color(Color::Muted) + .map(|label| { + if is_generating { + label + .with_animation( + "used-tokens-label", + Animation::new(Duration::from_secs(2)) + .repeat() + .with_easing(pulsating_between(0.6, 1.)), + |label, delta| label.alpha(delta), + ) + .into_any() + } else { + label.into_any_element() + } + }), + ) + .child( + Label::new("/") + .size(LabelSize::Small) + .color(Color::Custom(cx.theme().colors().text_muted.opacity(0.5))), + ) + .child(Label::new(max).size(LabelSize::Small).color(Color::Muted)), + ) + } + + fn toggle_burn_mode( + &mut self, + _: &ToggleBurnMode, + _window: &mut Window, + cx: &mut Context, + ) { + let Some(thread) = self.as_native_thread(cx) else { + return; + }; + + thread.update(cx, |thread, cx| { + let current_mode = thread.completion_mode(); + thread.set_completion_mode( + match current_mode { + CompletionMode::Burn => CompletionMode::Normal, + CompletionMode::Normal => CompletionMode::Burn, + }, + cx, + ); + }); + } + + fn keep_all(&mut self, _: &KeepAll, _window: &mut Window, cx: &mut Context) { + let Some(thread) = self.thread() else { + return; + }; + let action_log = thread.read(cx).action_log().clone(); + action_log.update(cx, |action_log, cx| action_log.keep_all_edits(cx)); + } + + fn reject_all(&mut self, _: &RejectAll, _window: &mut Window, cx: &mut Context) { + let Some(thread) = self.thread() else { + return; + }; + let action_log = thread.read(cx).action_log().clone(); + action_log + .update(cx, |action_log, cx| action_log.reject_all_edits(cx)) + .detach(); + } + + fn render_burn_mode_toggle(&self, cx: &mut Context) -> Option { + let thread = self.as_native_thread(cx)?.read(cx); + + if thread + .model() + .is_none_or(|model| !model.supports_burn_mode()) + { + return None; + } + + let active_completion_mode = thread.completion_mode(); + let burn_mode_enabled = active_completion_mode == CompletionMode::Burn; + let icon = if burn_mode_enabled { + IconName::ZedBurnModeOn } else { - IconButton::new("stop-generation", IconName::StopFilled) + IconName::ZedBurnMode + }; + + Some( + IconButton::new("burn-mode", icon) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .toggle_state(burn_mode_enabled) + .selected_icon_color(Color::Error) + .on_click(cx.listener(|this, _event, window, cx| { + this.toggle_burn_mode(&ToggleBurnMode, window, cx); + })) + .tooltip(move |_window, cx| { + cx.new(|_| BurnModeTooltip::new().selected(burn_mode_enabled)) + .into() + }) + .into_any_element(), + ) + } + + fn render_send_button(&self, cx: &mut Context) -> AnyElement { + let is_editor_empty = self.message_editor.read(cx).is_empty(cx); + let is_generating = self + .thread() + .is_some_and(|thread| thread.read(cx).status() != ThreadStatus::Idle); + + if is_generating && is_editor_empty { + IconButton::new("stop-generation", IconName::Stop) .icon_color(Color::Error) .style(ButtonStyle::Tinted(ui::TintColor::Error)) .tooltip(move |window, cx| { Tooltip::for_action("Stop Generation", &editor::actions::Cancel, window, cx) }) - .on_click(cx.listener(|this, _event, _, cx| this.cancel(cx))) + .on_click(cx.listener(|this, _event, _, cx| 
this.cancel_generation(cx))) + .into_any_element() + } else { + let send_btn_tooltip = if is_editor_empty && !is_generating { + "Type to Send" + } else if is_generating { + "Stop and Send Message" + } else { + "Send" + }; + + IconButton::new("send-message", IconName::Send) + .style(ButtonStyle::Filled) + .map(|this| { + if is_editor_empty && !is_generating { + this.disabled(true).icon_color(Color::Muted) + } else { + this.icon_color(Color::Accent) + } + }) + .tooltip(move |window, cx| Tooltip::for_action(send_btn_tooltip, &Chat, window, cx)) + .on_click(cx.listener(|this, _, window, cx| { + this.send(window, cx); + })) .into_any_element() } } @@ -2233,59 +3568,137 @@ impl AcpThreadView { return; }; - if let Some(mention_path) = MentionPath::try_parse(&url) { - workspace.update(cx, |workspace, cx| { - let project = workspace.project(); - let Some((path, entry)) = project.update(cx, |project, cx| { - let path = project.find_project_path(mention_path.path(), cx)?; - let entry = project.entry_for_path(&path, cx)?; - Some((path, entry)) - }) else { - return; - }; + if let Some(mention) = MentionUri::parse(&url).log_err() { + workspace.update(cx, |workspace, cx| match mention { + MentionUri::File { abs_path } => { + let project = workspace.project(); + let Some(path) = + project.update(cx, |project, cx| project.find_project_path(abs_path, cx)) + else { + return; + }; - if entry.is_dir() { - project.update(cx, |_, cx| { - cx.emit(project::Event::RevealInProjectPanel(entry.id)); - }); - } else { workspace .open_path(path, None, true, window, cx) .detach_and_log_err(cx); } - }) - } else { - cx.open_url(&url); - } - } + MentionUri::Directory { abs_path } => { + let project = workspace.project(); + let Some(entry) = project.update(cx, |project, cx| { + let path = project.find_project_path(abs_path, cx)?; + project.entry_for_path(&path, cx) + }) else { + return; + }; - fn open_tool_call_location( - &self, - entry_ix: usize, - location_ix: usize, - window: &mut Window, - cx: &mut Context, - ) -> Option<()> { - let location = self - .thread()? - .read(cx) - .entries() + project.update(cx, |_, cx| { + cx.emit(project::Event::RevealInProjectPanel(entry.id)); + }); + } + MentionUri::Symbol { + path, line_range, .. + } + | MentionUri::Selection { path, line_range } => { + let project = workspace.project(); + let Some((path, _)) = project.update(cx, |project, cx| { + let path = project.find_project_path(path, cx)?; + let entry = project.entry_for_path(&path, cx)?; + Some((path, entry)) + }) else { + return; + }; + + let item = workspace.open_path(path, None, true, window, cx); + window + .spawn(cx, async move |cx| { + let Some(editor) = item.await?.downcast::() else { + return Ok(()); + }; + let range = + Point::new(line_range.start, 0)..Point::new(line_range.start, 0); + editor + .update_in(cx, |editor, window, cx| { + editor.change_selections( + SelectionEffects::scroll(Autoscroll::center()), + window, + cx, + |s| s.select_ranges(vec![range]), + ); + }) + .ok(); + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + MentionUri::Thread { id, name } => { + if let Some(panel) = workspace.panel::(cx) { + panel.update(cx, |panel, cx| { + panel.load_agent_thread( + DbThreadMetadata { + id, + title: name.into(), + updated_at: Default::default(), + }, + window, + cx, + ) + }); + } + } + MentionUri::TextThread { path, .. 
} => { + if let Some(panel) = workspace.panel::(cx) { + panel.update(cx, |panel, cx| { + panel + .open_saved_prompt_editor(path.as_path().into(), window, cx) + .detach_and_log_err(cx); + }); + } + } + MentionUri::Rule { id, .. } => { + let PromptId::User { uuid } = id else { + return; + }; + window.dispatch_action( + Box::new(OpenRulesLibrary { + prompt_to_select: Some(uuid.0), + }), + cx, + ) + } + MentionUri::Fetch { url } => { + cx.open_url(url.as_str()); + } + }) + } else { + cx.open_url(&url); + } + } + + fn open_tool_call_location( + &self, + entry_ix: usize, + location_ix: usize, + window: &mut Window, + cx: &mut Context, + ) -> Option<()> { + let (tool_call_location, agent_location) = self + .thread()? + .read(cx) + .entries() .get(entry_ix)? - .locations()? - .get(location_ix)?; + .location(location_ix)?; let project_path = self .project .read(cx) - .find_project_path(&location.path, cx)?; + .find_project_path(&tool_call_location.path, cx)?; let open_task = self .workspace - .update(cx, |worskpace, cx| { - worskpace.open_path(project_path, None, true, window, cx) + .update(cx, |workspace, cx| { + workspace.open_path(project_path, None, true, window, cx) }) .log_err()?; - window .spawn(cx, async move |cx| { let item = open_task.await?; @@ -2295,17 +3708,22 @@ impl AcpThreadView { }; active_editor.update_in(cx, |editor, window, cx| { - let snapshot = editor.buffer().read(cx).snapshot(cx); - let first_hunk = editor - .diff_hunks_in_ranges( - &[editor::Anchor::min()..editor::Anchor::max()], - &snapshot, - ) - .next(); - if let Some(first_hunk) = first_hunk { - let first_hunk_start = first_hunk.multi_buffer_range().start; + let multibuffer = editor.buffer().read(cx); + let buffer = multibuffer.as_singleton(); + if agent_location.buffer.upgrade() == buffer { + let excerpt_id = multibuffer.excerpt_ids().first().cloned(); + let anchor = editor::Anchor::in_buffer( + excerpt_id.unwrap(), + buffer.unwrap().read(cx).remote_id(), + agent_location.position, + ); editor.change_selections(Default::default(), window, cx, |selections| { - selections.select_anchor_ranges([first_hunk_start..first_hunk_start]); + selections.select_anchor_ranges([anchor..anchor]); + }) + } else { + let row = tool_call_location.line.unwrap_or_default(); + editor.change_selections(Default::default(), window, cx, |selections| { + selections.select_ranges([Point::new(row, 0)..Point::new(row, 0)]); }) } })?; @@ -2343,7 +3761,7 @@ impl AcpThreadView { let project = workspace.project().clone(); if !project.read(cx).is_local() { - anyhow::bail!("failed to open active thread as markdown in remote project"); + bail!("failed to open active thread as markdown in remote project"); } let buffer = project.update(cx, |project, cx| { @@ -2462,62 +3880,61 @@ impl AcpThreadView { }) }) .log_err() + && let Some(pop_up) = screen_window.entity(cx).log_err() { - if let Some(pop_up) = screen_window.entity(cx).log_err() { - self.notification_subscriptions - .entry(screen_window) - .or_insert_with(Vec::new) - .push(cx.subscribe_in(&pop_up, window, { - |this, _, event, window, cx| match event { - AgentNotificationEvent::Accepted => { - let handle = window.window_handle(); - cx.activate(true); - - let workspace_handle = this.workspace.clone(); - - // If there are multiple Zed windows, activate the correct one. 
- cx.defer(move |cx| { - handle - .update(cx, |_view, window, _cx| { - window.activate_window(); - - if let Some(workspace) = workspace_handle.upgrade() { - workspace.update(_cx, |workspace, cx| { - workspace.focus_panel::(window, cx); - }); - } - }) - .log_err(); - }); + self.notification_subscriptions + .entry(screen_window) + .or_insert_with(Vec::new) + .push(cx.subscribe_in(&pop_up, window, { + |this, _, event, window, cx| match event { + AgentNotificationEvent::Accepted => { + let handle = window.window_handle(); + cx.activate(true); + + let workspace_handle = this.workspace.clone(); + + // If there are multiple Zed windows, activate the correct one. + cx.defer(move |cx| { + handle + .update(cx, |_view, window, _cx| { + window.activate_window(); + + if let Some(workspace) = workspace_handle.upgrade() { + workspace.update(_cx, |workspace, cx| { + workspace.focus_panel::(window, cx); + }); + } + }) + .log_err(); + }); - this.dismiss_notifications(cx); - } - AgentNotificationEvent::Dismissed => { - this.dismiss_notifications(cx); - } + this.dismiss_notifications(cx); } - })); - - self.notifications.push(screen_window); - - // If the user manually refocuses the original window, dismiss the popup. - self.notification_subscriptions - .entry(screen_window) - .or_insert_with(Vec::new) - .push({ - let pop_up_weak = pop_up.downgrade(); - - cx.observe_window_activation(window, move |_, window, cx| { - if window.is_window_active() { - if let Some(pop_up) = pop_up_weak.upgrade() { - pop_up.update(cx, |_, cx| { - cx.emit(AgentNotificationEvent::Dismissed); - }); - } - } - }) - }); - } + AgentNotificationEvent::Dismissed => { + this.dismiss_notifications(cx); + } + } + })); + + self.notifications.push(screen_window); + + // If the user manually refocuses the original window, dismiss the popup. 
+ self.notification_subscriptions + .entry(screen_window) + .or_insert_with(Vec::new) + .push({ + let pop_up_weak = pop_up.downgrade(); + + cx.observe_window_activation(window, move |_, window, cx| { + if window.is_window_active() + && let Some(pop_up) = pop_up_weak.upgrade() + { + pop_up.update(cx, |_, cx| { + cx.emit(AgentNotificationEvent::Dismissed); + }); + } + }) + }); } } @@ -2534,8 +3951,9 @@ impl AcpThreadView { } fn render_thread_controls(&self, cx: &Context) -> impl IntoElement { - let open_as_markdown = IconButton::new("open-as-markdown", IconName::FileText) - .icon_size(IconSize::XSmall) + let open_as_markdown = IconButton::new("open-as-markdown", IconName::FileMarkdown) + .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::Small) .icon_color(Color::Ignored) .tooltip(Tooltip::text("Open Thread as Markdown")) .on_click(cx.listener(move |this, _, window, cx| { @@ -2545,15 +3963,18 @@ impl AcpThreadView { } })); - let scroll_to_top = IconButton::new("scroll_to_top", IconName::ArrowUpAlt) - .icon_size(IconSize::XSmall) + let scroll_to_top = IconButton::new("scroll_to_top", IconName::ArrowUp) + .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::Small) .icon_color(Color::Ignored) .tooltip(Tooltip::text("Scroll To Top")) .on_click(cx.listener(move |this, _, _, cx| { this.scroll_to_top(cx); })); - h_flex() + let mut container = h_flex() + .id("thread-controls-container") + .group("thread-controls-container") .w_full() .mr_1() .pb_2() @@ -2561,9 +3982,149 @@ impl AcpThreadView { .opacity(0.4) .hover(|style| style.opacity(1.)) .flex_wrap() - .justify_end() - .child(open_as_markdown) - .child(scroll_to_top) + .justify_end(); + + if AgentSettings::get_global(cx).enable_feedback + && self + .thread() + .is_some_and(|thread| thread.read(cx).connection().telemetry().is_some()) + { + let feedback = self.thread_feedback.feedback; + container = container.child( + div().visible_on_hover("thread-controls-container").child( + Label::new( + match feedback { + Some(ThreadFeedback::Positive) => "Thanks for your feedback!", + Some(ThreadFeedback::Negative) => "We appreciate your feedback and will use it to improve.", + None => "Rating the thread sends all of your current conversation to the Zed team.", + } + ) + .color(Color::Muted) + .size(LabelSize::XSmall) + .truncate(), + ), + ).child( + h_flex() + .child( + IconButton::new("feedback-thumbs-up", IconName::ThumbsUp) + .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::Small) + .icon_color(match feedback { + Some(ThreadFeedback::Positive) => Color::Accent, + _ => Color::Ignored, + }) + .tooltip(Tooltip::text("Helpful Response")) + .on_click(cx.listener(move |this, _, window, cx| { + this.handle_feedback_click( + ThreadFeedback::Positive, + window, + cx, + ); + })), + ) + .child( + IconButton::new("feedback-thumbs-down", IconName::ThumbsDown) + .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::Small) + .icon_color(match feedback { + Some(ThreadFeedback::Negative) => Color::Accent, + _ => Color::Ignored, + }) + .tooltip(Tooltip::text("Not Helpful")) + .on_click(cx.listener(move |this, _, window, cx| { + this.handle_feedback_click( + ThreadFeedback::Negative, + window, + cx, + ); + })), + ) + ) + } + + container.child(open_as_markdown).child(scroll_to_top) + } + + fn render_feedback_feedback_editor( + editor: Entity, + window: &mut Window, + cx: &Context, + ) -> Div { + let focus_handle = editor.focus_handle(cx); + v_flex() + .key_context("AgentFeedbackMessageEditor") + .on_action(cx.listener(move |this, _: 
&menu::Cancel, _, cx| { + this.thread_feedback.dismiss_comments(); + cx.notify(); + })) + .on_action(cx.listener(move |this, _: &menu::Confirm, _window, cx| { + this.submit_feedback_message(cx); + })) + .mb_2() + .mx_4() + .p_2() + .rounded_md() + .border_1() + .border_color(cx.theme().colors().border) + .bg(cx.theme().colors().editor_background) + .child(editor) + .child( + h_flex() + .gap_1() + .justify_end() + .child( + Button::new("dismiss-feedback-message", "Cancel") + .label_size(LabelSize::Small) + .key_binding( + KeyBinding::for_action_in(&menu::Cancel, &focus_handle, window, cx) + .map(|kb| kb.size(rems_from_px(10.))), + ) + .on_click(cx.listener(move |this, _, _window, cx| { + this.thread_feedback.dismiss_comments(); + cx.notify(); + })), + ) + .child( + Button::new("submit-feedback-message", "Share Feedback") + .style(ButtonStyle::Tinted(ui::TintColor::Accent)) + .label_size(LabelSize::Small) + .key_binding( + KeyBinding::for_action_in( + &menu::Confirm, + &focus_handle, + window, + cx, + ) + .map(|kb| kb.size(rems_from_px(10.))), + ) + .on_click(cx.listener(move |this, _, _window, cx| { + this.submit_feedback_message(cx); + })), + ), + ) + } + + fn handle_feedback_click( + &mut self, + feedback: ThreadFeedback, + window: &mut Window, + cx: &mut Context, + ) { + let Some(thread) = self.thread().cloned() else { + return; + }; + + self.thread_feedback.submit(thread, feedback, window, cx); + cx.notify(); + } + + fn submit_feedback_message(&mut self, cx: &mut Context) { + let Some(thread) = self.thread().cloned() else { + return; + }; + + self.thread_feedback.submit_comments(thread, cx); + cx.notify(); } fn render_vertical_scrollbar(&self, cx: &mut Context) -> Stateful
{ @@ -2599,153 +4160,516 @@ impl AcpThreadView { .children(Scrollbar::vertical(self.scrollbar_state.clone()).map(|s| s.auto_hide(cx))) } - fn settings_changed(&mut self, _window: &mut Window, cx: &mut Context) { - for diff_editor in self.diff_editors.values() { - diff_editor.update(cx, |diff_editor, cx| { - diff_editor.set_text_style_refinement(diff_editor_text_style_refinement(cx)); - cx.notify(); - }) - } - } -} + fn render_token_limit_callout( + &self, + line_height: Pixels, + cx: &mut Context, + ) -> Option { + let token_usage = self.thread()?.read(cx).token_usage()?; + let ratio = token_usage.ratio(); + + let (severity, title) = match ratio { + acp_thread::TokenUsageRatio::Normal => return None, + acp_thread::TokenUsageRatio::Warning => { + (Severity::Warning, "Thread reaching the token limit soon") + } + acp_thread::TokenUsageRatio::Exceeded => { + (Severity::Error, "Thread reached the token limit") + } + }; -impl Focusable for AcpThreadView { - fn focus_handle(&self, cx: &App) -> FocusHandle { - self.message_editor.focus_handle(cx) - } -} + let burn_mode_available = self.as_native_thread(cx).is_some_and(|thread| { + thread.read(cx).completion_mode() == CompletionMode::Normal + && thread + .read(cx) + .model() + .is_some_and(|model| model.supports_burn_mode()) + }); -impl Render for AcpThreadView { - fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { - v_flex() - .size_full() - .key_context("AcpThread") - .on_action(cx.listener(Self::chat)) - .on_action(cx.listener(Self::previous_history_message)) - .on_action(cx.listener(Self::next_history_message)) - .on_action(cx.listener(Self::open_agent_diff)) - .bg(cx.theme().colors().panel_background) - .child(match &self.thread_state { - ThreadState::Unauthenticated { connection } => v_flex() - .p_2() - .flex_1() - .items_center() - .justify_center() - .child(self.render_pending_auth_state()) - .child(h_flex().mt_1p5().justify_center().children( - connection.auth_methods().into_iter().map(|method| { - Button::new( - SharedString::from(method.id.0.clone()), - method.name.clone(), - ) - .on_click({ - let method_id = method.id.clone(); - cx.listener(move |this, _, window, cx| { - this.authenticate(method_id.clone(), window, cx) - }) - }) - }), - )), - ThreadState::Loading { .. } => v_flex().flex_1().child(self.render_empty_state(cx)), - ThreadState::LoadError(e) => v_flex() - .p_2() - .flex_1() - .items_center() - .justify_center() - .child(self.render_load_error(e, cx)), - ThreadState::ServerExited { status } => v_flex() - .p_2() - .flex_1() - .items_center() - .justify_center() - .child(self.render_server_exited(*status, cx)), - ThreadState::Ready { thread, .. } => { - let thread_clone = thread.clone(); + let description = if burn_mode_available { + "To continue, start a new thread from a summary or turn Burn Mode on." + } else { + "To continue, start a new thread from a summary." 
+ }; - v_flex().flex_1().map(|this| { - if self.list_state.item_count() > 0 { + Some( + Callout::new() + .severity(severity) + .line_height(line_height) + .title(title) + .description(description) + .actions_slot( + h_flex() + .gap_0p5() + .child( + Button::new("start-new-thread", "Start New Thread") + .label_size(LabelSize::Small) + .on_click(cx.listener(|this, _, window, cx| { + let Some(thread) = this.thread() else { + return; + }; + let session_id = thread.read(cx).session_id().clone(); + window.dispatch_action( + crate::NewNativeAgentThreadFromSummary { + from_session_id: session_id, + } + .boxed_clone(), + cx, + ); + })), + ) + .when(burn_mode_available, |this| { this.child( - list( - self.list_state.clone(), - cx.processor(|this, index: usize, window, cx| { - let Some((entry, len)) = this.thread().and_then(|thread| { - let entries = &thread.read(cx).entries(); - Some((entries.get(index)?, entries.len())) - }) else { - return Empty.into_any(); - }; - this.render_entry(index, len, entry, window, cx) - }), - ) - .with_sizing_behavior(gpui::ListSizingBehavior::Auto) - .flex_grow() - .into_any(), + IconButton::new("burn-mode-callout", IconName::ZedBurnMode) + .icon_size(IconSize::XSmall) + .on_click(cx.listener(|this, _event, window, cx| { + this.toggle_burn_mode(&ToggleBurnMode, window, cx); + })), ) - .child(self.render_vertical_scrollbar(cx)) - .children(match thread_clone.read(cx).status() { - ThreadStatus::Idle | ThreadStatus::WaitingForToolConfirmation => { - None - } - ThreadStatus::Generating => div() - .px_5() - .py_2() - .child(LoadingLabel::new("").size(LabelSize::Small)) - .into(), - }) - .children(self.render_activity_bar(&thread_clone, window, cx)) - } else { - this.child(self.render_empty_state(cx)) - } - }) - } - }) - .when_some(self.last_error.clone(), |el, error| { - el.child( - div() - .p_2() - .text_xs() - .border_t_1() - .border_color(cx.theme().colors().border) - .bg(cx.theme().status().error_background) - .child( - self.render_markdown(error, default_markdown_style(false, window, cx)), - ), - ) - }) - .child(self.render_message_editor(window, cx)) + }), + ), + ) } -} -fn user_message_markdown_style(window: &Window, cx: &App) -> MarkdownStyle { - let mut style = default_markdown_style(false, window, cx); - let mut text_style = window.text_style(); - let theme_settings = ThemeSettings::get_global(cx); + fn render_usage_callout(&self, line_height: Pixels, cx: &mut Context) -> Option
{ + if !self.is_using_zed_ai_models(cx) { + return None; + } - let buffer_font = theme_settings.buffer_font.family.clone(); - let buffer_font_size = TextSize::Small.rems(cx); + let user_store = self.project.read(cx).user_store().read(cx); + if user_store.is_usage_based_billing_enabled() { + return None; + } - text_style.refine(&TextStyleRefinement { - font_family: Some(buffer_font), - font_size: Some(buffer_font_size.into()), - ..Default::default() - }); + let plan = user_store.plan().unwrap_or(cloud_llm_client::Plan::ZedFree); - style.base_text_style = text_style; - style.link_callback = Some(Rc::new(move |url, cx| { - if MentionPath::try_parse(url).is_some() { - let colors = cx.theme().colors(); - Some(TextStyleRefinement { - background_color: Some(colors.element_background), - ..Default::default() - }) - } else { - None - } - })); - style -} + let usage = user_store.model_request_usage()?; -fn default_markdown_style(buffer_font: bool, window: &Window, cx: &App) -> MarkdownStyle { + Some( + div() + .child(UsageCallout::new(plan, usage)) + .line_height(line_height), + ) + } + + fn settings_changed(&mut self, _window: &mut Window, cx: &mut Context) { + self.entry_view_state.update(cx, |entry_view_state, cx| { + entry_view_state.settings_changed(cx); + }); + } + + pub(crate) fn insert_dragged_files( + &self, + paths: Vec, + added_worktrees: Vec>, + window: &mut Window, + cx: &mut Context, + ) { + self.message_editor.update(cx, |message_editor, cx| { + message_editor.insert_dragged_files(paths, added_worktrees, window, cx); + }) + } + + pub(crate) fn insert_selections(&self, window: &mut Window, cx: &mut Context) { + self.message_editor.update(cx, |message_editor, cx| { + message_editor.insert_selections(window, cx); + }) + } + + fn render_thread_retry_status_callout( + &self, + _window: &mut Window, + _cx: &mut Context, + ) -> Option { + let state = self.thread_retry_status.as_ref()?; + + let next_attempt_in = state + .duration + .saturating_sub(Instant::now().saturating_duration_since(state.started_at)); + if next_attempt_in.is_zero() { + return None; + } + + let next_attempt_in_secs = next_attempt_in.as_secs() + 1; + + let retry_message = if state.max_attempts == 1 { + if next_attempt_in_secs == 1 { + "Retrying. Next attempt in 1 second.".to_string() + } else { + format!("Retrying. Next attempt in {next_attempt_in_secs} seconds.") + } + } else if next_attempt_in_secs == 1 { + format!( + "Retrying. Next attempt in 1 second (Attempt {} of {}).", + state.attempt, state.max_attempts, + ) + } else { + format!( + "Retrying. Next attempt in {next_attempt_in_secs} seconds (Attempt {} of {}).", + state.attempt, state.max_attempts, + ) + }; + + Some( + Callout::new() + .severity(Severity::Warning) + .title(state.last_error.clone()) + .description(retry_message), + ) + } + + fn render_thread_error(&self, window: &mut Window, cx: &mut Context) -> Option
{
+        let content = match self.thread_error.as_ref()? {
+            ThreadError::Other(error) => self.render_any_thread_error(error.clone(), cx),
+            ThreadError::AuthenticationRequired(error) => {
+                self.render_authentication_required_error(error.clone(), cx)
+            }
+            ThreadError::PaymentRequired => self.render_payment_required_error(cx),
+            ThreadError::ModelRequestLimitReached(plan) => {
+                self.render_model_request_limit_reached_error(*plan, cx)
+            }
+            ThreadError::ToolUseLimitReached => {
+                self.render_tool_use_limit_reached_error(window, cx)?
+            }
+        };
+
+        Some(div().child(content))
+    }
+
+    fn render_any_thread_error(&self, error: SharedString, cx: &mut Context<'_, Self>) -> Callout {
+        Callout::new()
+            .severity(Severity::Error)
+            .title("Error")
+            .description(error.clone())
+            .actions_slot(self.create_copy_button(error.to_string()))
+            .dismiss_action(self.dismiss_error_button(cx))
+    }
+
+    fn render_payment_required_error(&self, cx: &mut Context<Self>) -> Callout {
+        const ERROR_MESSAGE: &str =
+            "You reached your free usage limit. Upgrade to Zed Pro for more prompts.";
+
+        Callout::new()
+            .severity(Severity::Error)
+            .title("Free Usage Exceeded")
+            .description(ERROR_MESSAGE)
+            .actions_slot(
+                h_flex()
+                    .gap_0p5()
+                    .child(self.upgrade_button(cx))
+                    .child(self.create_copy_button(ERROR_MESSAGE)),
+            )
+            .dismiss_action(self.dismiss_error_button(cx))
+    }
+
+    fn render_authentication_required_error(
+        &self,
+        error: SharedString,
+        cx: &mut Context<Self>,
+    ) -> Callout {
+        Callout::new()
+            .severity(Severity::Error)
+            .title("Authentication Required")
+            .description(error.clone())
+            .actions_slot(
+                h_flex()
+                    .gap_0p5()
+                    .child(self.authenticate_button(cx))
+                    .child(self.create_copy_button(error)),
+            )
+            .dismiss_action(self.dismiss_error_button(cx))
+    }
+
+    fn render_model_request_limit_reached_error(
+        &self,
+        plan: cloud_llm_client::Plan,
+        cx: &mut Context<Self>,
+    ) -> Callout {
+        let error_message = match plan {
+            cloud_llm_client::Plan::ZedPro => "Upgrade to usage-based billing for more prompts.",
+            cloud_llm_client::Plan::ZedProTrial | cloud_llm_client::Plan::ZedFree => {
+                "Upgrade to Zed Pro for more prompts."
+ } + }; + + Callout::new() + .severity(Severity::Error) + .title("Model Prompt Limit Reached") + .description(error_message) + .actions_slot( + h_flex() + .gap_0p5() + .child(self.upgrade_button(cx)) + .child(self.create_copy_button(error_message)), + ) + .dismiss_action(self.dismiss_error_button(cx)) + } + + fn render_tool_use_limit_reached_error( + &self, + window: &mut Window, + cx: &mut Context, + ) -> Option { + let thread = self.as_native_thread(cx)?; + let supports_burn_mode = thread + .read(cx) + .model() + .is_some_and(|model| model.supports_burn_mode()); + + let focus_handle = self.focus_handle(cx); + + Some( + Callout::new() + .icon(IconName::Info) + .title("Consecutive tool use limit reached.") + .actions_slot( + h_flex() + .gap_0p5() + .when(supports_burn_mode, |this| { + this.child( + Button::new("continue-burn-mode", "Continue with Burn Mode") + .style(ButtonStyle::Filled) + .style(ButtonStyle::Tinted(ui::TintColor::Accent)) + .layer(ElevationIndex::ModalSurface) + .label_size(LabelSize::Small) + .key_binding( + KeyBinding::for_action_in( + &ContinueWithBurnMode, + &focus_handle, + window, + cx, + ) + .map(|kb| kb.size(rems_from_px(10.))), + ) + .tooltip(Tooltip::text( + "Enable Burn Mode for unlimited tool use.", + )) + .on_click({ + cx.listener(move |this, _, _window, cx| { + thread.update(cx, |thread, cx| { + thread + .set_completion_mode(CompletionMode::Burn, cx); + }); + this.resume_chat(cx); + }) + }), + ) + }) + .child( + Button::new("continue-conversation", "Continue") + .layer(ElevationIndex::ModalSurface) + .label_size(LabelSize::Small) + .key_binding( + KeyBinding::for_action_in( + &ContinueThread, + &focus_handle, + window, + cx, + ) + .map(|kb| kb.size(rems_from_px(10.))), + ) + .on_click(cx.listener(|this, _, _window, cx| { + this.resume_chat(cx); + })), + ), + ), + ) + } + + fn create_copy_button(&self, message: impl Into) -> impl IntoElement { + let message = message.into(); + + IconButton::new("copy", IconName::Copy) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .tooltip(Tooltip::text("Copy Error Message")) + .on_click(move |_, _, cx| { + cx.write_to_clipboard(ClipboardItem::new_string(message.clone())) + }) + } + + fn dismiss_error_button(&self, cx: &mut Context) -> impl IntoElement { + IconButton::new("dismiss", IconName::Close) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .tooltip(Tooltip::text("Dismiss Error")) + .on_click(cx.listener({ + move |this, _, _, cx| { + this.clear_thread_error(cx); + cx.notify(); + } + })) + } + + fn authenticate_button(&self, cx: &mut Context) -> impl IntoElement { + Button::new("authenticate", "Authenticate") + .label_size(LabelSize::Small) + .style(ButtonStyle::Filled) + .on_click(cx.listener({ + move |this, _, window, cx| { + let agent = this.agent.clone(); + let ThreadState::Ready { thread, .. 
} = &this.thread_state else { + return; + }; + + let connection = thread.read(cx).connection().clone(); + let err = AuthRequired { + description: None, + provider_id: None, + }; + this.clear_thread_error(cx); + let this = cx.weak_entity(); + window.defer(cx, |window, cx| { + Self::handle_auth_required(this, err, agent, connection, window, cx); + }) + } + })) + } + + fn upgrade_button(&self, cx: &mut Context) -> impl IntoElement { + Button::new("upgrade", "Upgrade") + .label_size(LabelSize::Small) + .style(ButtonStyle::Tinted(ui::TintColor::Accent)) + .on_click(cx.listener({ + move |this, _, _, cx| { + this.clear_thread_error(cx); + cx.open_url(&zed_urls::upgrade_to_zed_pro_url(cx)); + } + })) + } + + fn reset(&mut self, window: &mut Window, cx: &mut Context) { + self.thread_state = Self::initial_state( + self.agent.clone(), + None, + self.workspace.clone(), + self.project.clone(), + window, + cx, + ); + cx.notify(); + } + + pub fn delete_history_entry(&mut self, entry: HistoryEntry, cx: &mut Context) { + let task = match entry { + HistoryEntry::AcpThread(thread) => self.history_store.update(cx, |history, cx| { + history.delete_thread(thread.id.clone(), cx) + }), + HistoryEntry::TextThread(context) => self.history_store.update(cx, |history, cx| { + history.delete_text_thread(context.path.clone(), cx) + }), + }; + task.detach_and_log_err(cx); + } +} + +impl Focusable for AcpThreadView { + fn focus_handle(&self, cx: &App) -> FocusHandle { + self.message_editor.focus_handle(cx) + } +} + +impl Render for AcpThreadView { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let has_messages = self.list_state.item_count() > 0; + let line_height = TextSize::Small.rems(cx).to_pixels(window.rem_size()) * 1.5; + + v_flex() + .size_full() + .key_context("AcpThread") + .on_action(cx.listener(Self::open_agent_diff)) + .on_action(cx.listener(Self::toggle_burn_mode)) + .on_action(cx.listener(Self::keep_all)) + .on_action(cx.listener(Self::reject_all)) + .bg(cx.theme().colors().panel_background) + .child(match &self.thread_state { + ThreadState::Unauthenticated { + connection, + description, + configuration_view, + pending_auth_method, + .. + } => self.render_auth_required_state( + connection, + description.as_ref(), + configuration_view.as_ref(), + pending_auth_method.as_ref(), + window, + cx, + ), + ThreadState::Loading { .. } => { + v_flex().flex_1().child(self.render_empty_state(window, cx)) + } + ThreadState::LoadError(e) => v_flex() + .p_2() + .flex_1() + .items_center() + .justify_center() + .child(self.render_load_error(e, cx)), + ThreadState::Ready { thread, .. 
} => { + let thread_clone = thread.clone(); + + v_flex().flex_1().map(|this| { + if has_messages { + this.child( + list( + self.list_state.clone(), + cx.processor(|this, index: usize, window, cx| { + let Some((entry, len)) = this.thread().and_then(|thread| { + let entries = &thread.read(cx).entries(); + Some((entries.get(index)?, entries.len())) + }) else { + return Empty.into_any(); + }; + this.render_entry(index, len, entry, window, cx) + }), + ) + .with_sizing_behavior(gpui::ListSizingBehavior::Auto) + .flex_grow() + .into_any(), + ) + .child(self.render_vertical_scrollbar(cx)) + .children( + match thread_clone.read(cx).status() { + ThreadStatus::Idle + | ThreadStatus::WaitingForToolConfirmation => None, + ThreadStatus::Generating => div() + .px_5() + .py_2() + .child(LoadingLabel::new("").size(LabelSize::Small)) + .into(), + }, + ) + } else { + this.child(self.render_empty_state(window, cx)) + } + }) + } + }) + // The activity bar is intentionally rendered outside of the ThreadState::Ready match + // above so that the scrollbar doesn't render behind it. The current setup allows + // the scrollbar to stop exactly at the activity bar start. + .when(has_messages, |this| match &self.thread_state { + ThreadState::Ready { thread, .. } => { + this.children(self.render_activity_bar(thread, window, cx)) + } + _ => this, + }) + .children(self.render_thread_retry_status_callout(window, cx)) + .children(self.render_thread_error(window, cx)) + .children( + if let Some(usage_callout) = self.render_usage_callout(line_height, cx) { + Some(usage_callout.into_any_element()) + } else { + self.render_token_limit_callout(line_height, cx) + .map(|token_limit_callout| token_limit_callout.into_any_element()) + }, + ) + .child(self.render_message_editor(window, cx)) + } +} + +fn default_markdown_style(buffer_font: bool, window: &Window, cx: &App) -> MarkdownStyle { let theme_settings = ThemeSettings::get_global(cx); let colors = cx.theme().colors(); @@ -2884,31 +4808,32 @@ fn plan_label_markdown_style( } } -fn diff_editor_text_style_refinement(cx: &mut App) -> TextStyleRefinement { - TextStyleRefinement { - font_size: Some( - TextSize::Small - .rems(cx) - .to_pixels(ThemeSettings::get_global(cx).agent_font_size(cx)) - .into(), - ), +fn terminal_command_markdown_style(window: &Window, cx: &App) -> MarkdownStyle { + let default_md_style = default_markdown_style(true, window, cx); + + MarkdownStyle { + base_text_style: TextStyle { + ..default_md_style.base_text_style + }, + selection_background_color: cx.theme().colors().element_selection_background, ..Default::default() } } #[cfg(test)] -mod tests { +pub(crate) mod tests { + use acp_thread::StubAgentConnection; use agent_client_protocol::SessionId; + use assistant_context::ContextStore; use editor::EditorSettings; use fs::FakeFs; - use futures::future::try_join_all; - use gpui::{SemanticVersion, TestAppContext, VisualTestContext}; - use lsp::{CompletionContext, CompletionTriggerKind}; - use project::CompletionIntent; - use rand::Rng; + use gpui::{EventEmitter, SemanticVersion, TestAppContext, VisualTestContext}; + use project::Project; use serde_json::json; use settings::SettingsStore; - use util::path; + use std::any::Any; + use std::path::Path; + use workspace::Item; use super::*; @@ -2916,7 +4841,7 @@ mod tests { async fn test_drop(cx: &mut TestAppContext) { init_test(cx); - let (thread_view, _cx) = setup_thread_view(StubAgentServer::default(), cx).await; + let (thread_view, _cx) = setup_thread_view(StubAgentServer::default_response(), cx).await; let 
weak_view = thread_view.downgrade(); drop(thread_view); assert!(!weak_view.is_upgradable()); @@ -2926,7 +4851,7 @@ mod tests { async fn test_notification_for_stop_event(cx: &mut TestAppContext) { init_test(cx); - let (thread_view, cx) = setup_thread_view(StubAgentServer::default(), cx).await; + let (thread_view, cx) = setup_thread_view(StubAgentServer::default_response(), cx).await; let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone()); message_editor.update_in(cx, |editor, window, cx| { @@ -2936,7 +4861,7 @@ mod tests { cx.deactivate_window(); thread_view.update_in(cx, |thread_view, window, cx| { - thread_view.chat(&Chat, window, cx); + thread_view.send(window, cx); }); cx.run_until_parked(); @@ -2963,7 +4888,7 @@ mod tests { cx.deactivate_window(); thread_view.update_in(cx, |thread_view, window, cx| { - thread_view.chat(&Chat, window, cx); + thread_view.send(window, cx); }); cx.run_until_parked(); @@ -2990,8 +4915,8 @@ mod tests { raw_input: None, raw_output: None, }; - let connection = StubAgentConnection::new(vec![acp::SessionUpdate::ToolCall(tool_call)]) - .with_permission_requests(HashMap::from_iter([( + let connection = + StubAgentConnection::new().with_permission_requests(HashMap::from_iter([( tool_call_id, vec![acp::PermissionOption { id: acp::PermissionOptionId("1".into()), @@ -2999,6 +4924,9 @@ mod tests { kind: acp::PermissionOptionKind::AllowOnce, }], )])); + + connection.set_next_prompt_updates(vec![acp::SessionUpdate::ToolCall(tool_call)]); + let (thread_view, cx) = setup_thread_view(StubAgentServer::new(connection), cx).await; let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone()); @@ -3009,7 +4937,7 @@ mod tests { cx.deactivate_window(); thread_view.update_in(cx, |thread_view, window, cx| { - thread_view.chat(&Chat, window, cx); + thread_view.send(window, cx); }); cx.run_until_parked(); @@ -3021,134 +4949,81 @@ mod tests { ); } - #[gpui::test] - async fn test_crease_removal(cx: &mut TestAppContext) { - init_test(cx); - + async fn setup_thread_view( + agent: impl AgentServer + 'static, + cx: &mut TestAppContext, + ) -> (Entity, &mut VisualTestContext) { let fs = FakeFs::new(cx.executor()); - fs.insert_tree("/project", json!({"file": ""})).await; - let project = Project::test(fs, [Path::new(path!("/project"))], cx).await; - let agent = StubAgentServer::default(); + let project = Project::test(fs, [], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + + let context_store = + cx.update(|_window, cx| cx.new(|cx| ContextStore::fake(project.clone(), cx))); + let history_store = + cx.update(|_window, cx| cx.new(|cx| HistoryStore::new(context_store, cx))); + let thread_view = cx.update(|window, cx| { cx.new(|cx| { AcpThreadView::new( Rc::new(agent), + None, + None, workspace.downgrade(), project, - Rc::new(RefCell::new(MessageHistory::default())), - 1, + history_store, None, window, cx, ) }) }); - cx.run_until_parked(); + (thread_view, cx) + } - let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone()); - let excerpt_id = message_editor.update(cx, |editor, cx| { - editor - .buffer() - .read(cx) - .excerpt_ids() - .into_iter() - .next() - .unwrap() - }); - let completions = message_editor.update_in(cx, |editor, window, cx| { - editor.set_text("Hello @", window, cx); - let buffer = editor.buffer().read(cx).as_singleton().unwrap(); - let completion_provider = editor.completion_provider().unwrap(); - completion_provider.completions( - excerpt_id, - 
&buffer, - Anchor::MAX, - CompletionContext { - trigger_kind: CompletionTriggerKind::TRIGGER_CHARACTER, - trigger_character: Some("@".into()), - }, - window, - cx, - ) - }); - let [_, completion]: [_; 2] = completions - .await - .unwrap() - .into_iter() - .flat_map(|response| response.completions) - .collect::>() - .try_into() + fn add_to_workspace(thread_view: Entity, cx: &mut VisualTestContext) { + let workspace = thread_view.read_with(cx, |thread_view, _cx| thread_view.workspace.clone()); + + workspace + .update_in(cx, |workspace, window, cx| { + workspace.add_item_to_active_pane( + Box::new(cx.new(|_| ThreadViewItem(thread_view.clone()))), + None, + true, + window, + cx, + ); + }) .unwrap(); + } - message_editor.update_in(cx, |editor, window, cx| { - let snapshot = editor.buffer().read(cx).snapshot(cx); - let start = snapshot - .anchor_in_excerpt(excerpt_id, completion.replace_range.start) - .unwrap(); - let end = snapshot - .anchor_in_excerpt(excerpt_id, completion.replace_range.end) - .unwrap(); - editor.edit([(start..end, completion.new_text)], cx); - (completion.confirm.unwrap())(CompletionIntent::Complete, window, cx); - }); + struct ThreadViewItem(Entity); - cx.run_until_parked(); + impl Item for ThreadViewItem { + type Event = (); - // Backspace over the inserted crease (and the following space). - message_editor.update_in(cx, |editor, window, cx| { - editor.backspace(&Default::default(), window, cx); - editor.backspace(&Default::default(), window, cx); - }); + fn include_in_nav_history() -> bool { + false + } - thread_view.update_in(cx, |thread_view, window, cx| { - thread_view.chat(&Chat, window, cx); - }); + fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString { + "Test".into() + } + } - cx.run_until_parked(); + impl EventEmitter<()> for ThreadViewItem {} - let content = thread_view.update_in(cx, |thread_view, _window, _cx| { - thread_view - .message_history - .borrow() - .items() - .iter() - .flatten() - .cloned() - .collect::>() - }); + impl Focusable for ThreadViewItem { + fn focus_handle(&self, cx: &App) -> FocusHandle { + self.0.read(cx).focus_handle(cx) + } + } - // We don't send a resource link for the deleted crease. - pretty_assertions::assert_matches!(content.as_slice(), [acp::ContentBlock::Text { .. 
}]); - } - - async fn setup_thread_view( - agent: impl AgentServer + 'static, - cx: &mut TestAppContext, - ) -> (Entity, &mut VisualTestContext) { - let fs = FakeFs::new(cx.executor()); - let project = Project::test(fs, [], cx).await; - let (workspace, cx) = - cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); - - let thread_view = cx.update(|window, cx| { - cx.new(|cx| { - AcpThreadView::new( - Rc::new(agent), - workspace.downgrade(), - project, - Rc::new(RefCell::new(MessageHistory::default())), - 1, - None, - window, - cx, - ) - }) - }); - cx.run_until_parked(); - (thread_view, cx) + impl Render for ThreadViewItem { + fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { + self.0.clone().into_any_element() + } } struct StubAgentServer { @@ -3162,8 +5037,12 @@ mod tests { } impl StubAgentServer { - fn default() -> Self { - Self::new(StubAgentConnection::default()) + fn default_response() -> Self { + let conn = StubAgentConnection::new(); + conn.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk { + content: "Default response".into(), + }]); + Self::new(conn) } } @@ -3172,19 +5051,19 @@ mod tests { C: 'static + AgentConnection + Send + Clone, { fn logo(&self) -> ui::IconName { - unimplemented!() + ui::IconName::Ai } fn name(&self) -> &'static str { - unimplemented!() + "Test" } fn empty_state_headline(&self) -> &'static str { - unimplemented!() + "Test" } fn empty_state_message(&self) -> &'static str { - unimplemented!() + "Test" } fn connect( @@ -3195,116 +5074,12 @@ mod tests { ) -> Task>> { Task::ready(Ok(Rc::new(self.connection.clone()))) } - } - - #[derive(Clone, Default)] - struct StubAgentConnection { - sessions: Arc>>>, - permission_requests: HashMap>, - updates: Vec, - } - - impl StubAgentConnection { - fn new(updates: Vec) -> Self { - Self { - updates, - permission_requests: HashMap::default(), - sessions: Arc::default(), - } - } - fn with_permission_requests( - mut self, - permission_requests: HashMap>, - ) -> Self { - self.permission_requests = permission_requests; + fn into_any(self: Rc) -> Rc { self } } - impl AgentConnection for StubAgentConnection { - fn auth_methods(&self) -> &[acp::AuthMethod] { - &[] - } - - fn new_thread( - self: Rc, - project: Entity, - _cwd: &Path, - cx: &mut gpui::AsyncApp, - ) -> Task>> { - let session_id = SessionId( - rand::thread_rng() - .sample_iter(&rand::distributions::Alphanumeric) - .take(7) - .map(char::from) - .collect::() - .into(), - ); - let thread = cx - .new(|cx| AcpThread::new("Test", self.clone(), project, session_id.clone(), cx)) - .unwrap(); - self.sessions.lock().insert(session_id, thread.downgrade()); - Task::ready(Ok(thread)) - } - - fn authenticate( - &self, - _method_id: acp::AuthMethodId, - _cx: &mut App, - ) -> Task> { - unimplemented!() - } - - fn prompt( - &self, - params: acp::PromptRequest, - cx: &mut App, - ) -> Task> { - let sessions = self.sessions.lock(); - let thread = sessions.get(¶ms.session_id).unwrap(); - let mut tasks = vec![]; - for update in &self.updates { - let thread = thread.clone(); - let update = update.clone(); - let permission_request = if let acp::SessionUpdate::ToolCall(tool_call) = &update - && let Some(options) = self.permission_requests.get(&tool_call.id) - { - Some((tool_call.clone(), options.clone())) - } else { - None - }; - let task = cx.spawn(async move |cx| { - if let Some((tool_call, options)) = permission_request { - let permission = thread.update(cx, |thread, cx| { - thread.request_tool_call_authorization( - 
tool_call.clone(), - options.clone(), - cx, - ) - })?; - permission.await?; - } - thread.update(cx, |thread, cx| { - thread.handle_session_update(update.clone(), cx).unwrap(); - })?; - anyhow::Ok(()) - }); - tasks.push(task); - } - cx.spawn(async move |_| { - try_join_all(tasks).await?; - Ok(acp::PromptResponse { - stop_reason: acp::StopReason::EndTurn, - }) - }) - } - - fn cancel(&self, _session_id: &acp::SessionId, _cx: &mut App) { - unimplemented!() - } - } - #[derive(Clone)] struct SaboteurAgentConnection; @@ -3313,25 +5088,32 @@ mod tests { self: Rc, project: Entity, _cwd: &Path, - cx: &mut gpui::AsyncApp, + cx: &mut gpui::App, ) -> Task>> { - Task::ready(Ok(cx - .new(|cx| { - AcpThread::new( - "SaboteurAgentConnection", - self, - project, - SessionId("test".into()), - cx, - ) - }) - .unwrap())) + Task::ready(Ok(cx.new(|cx| { + let action_log = cx.new(|_| ActionLog::new(project.clone())); + AcpThread::new( + "SaboteurAgentConnection", + self, + project, + action_log, + SessionId("test".into()), + ) + }))) } fn auth_methods(&self) -> &[acp::AuthMethod] { &[] } + fn prompt_capabilities(&self) -> acp::PromptCapabilities { + acp::PromptCapabilities { + image: true, + audio: true, + embedded_context: true, + } + } + fn authenticate( &self, _method_id: acp::AuthMethodId, @@ -3342,6 +5124,7 @@ mod tests { fn prompt( &self, + _id: Option, _params: acp::PromptRequest, _cx: &mut App, ) -> Task> { @@ -3351,9 +5134,13 @@ mod tests { fn cancel(&self, _session_id: &acp::SessionId, _cx: &mut App) { unimplemented!() } + + fn into_any(self: Rc) -> Rc { + self + } } - fn init_test(cx: &mut TestAppContext) { + pub(crate) fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); @@ -3364,6 +5151,567 @@ mod tests { ThemeSettings::register(cx); release_channel::init(SemanticVersion::default(), cx); EditorSettings::register(cx); + prompt_store::init(cx) + }); + } + + #[gpui::test] + async fn test_rewind_views(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/project", + json!({ + "test1.txt": "old content 1", + "test2.txt": "old content 2" + }), + ) + .await; + let project = Project::test(fs, [Path::new("/project")], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + + let context_store = + cx.update(|_window, cx| cx.new(|cx| ContextStore::fake(project.clone(), cx))); + let history_store = + cx.update(|_window, cx| cx.new(|cx| HistoryStore::new(context_store, cx))); + + let connection = Rc::new(StubAgentConnection::new()); + let thread_view = cx.update(|window, cx| { + cx.new(|cx| { + AcpThreadView::new( + Rc::new(StubAgentServer::new(connection.as_ref().clone())), + None, + None, + workspace.downgrade(), + project.clone(), + history_store.clone(), + None, + window, + cx, + ) + }) + }); + + cx.run_until_parked(); + + let thread = thread_view + .read_with(cx, |view, _| view.thread().cloned()) + .unwrap(); + + // First user message + connection.set_next_prompt_updates(vec![acp::SessionUpdate::ToolCall(acp::ToolCall { + id: acp::ToolCallId("tool1".into()), + title: "Edit file 1".into(), + kind: acp::ToolKind::Edit, + status: acp::ToolCallStatus::Completed, + content: vec![acp::ToolCallContent::Diff { + diff: acp::Diff { + path: "/project/test1.txt".into(), + old_text: Some("old content 1".into()), + new_text: "new content 1".into(), + }, + }], + locations: vec![], + raw_input: None, + raw_output: None, + })]); 
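+        // Note: judging from its use earlier in these tests, set_next_prompt_updates
+        // queues the session updates that the StubAgentConnection replays on the next
+        // prompt, so the send_raw call below is expected to yield two thread entries:
+        // the user message and this edit tool call with its diff.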
+ + thread + .update(cx, |thread, cx| thread.send_raw("Give me a diff", cx)) + .await + .unwrap(); + cx.run_until_parked(); + + thread.read_with(cx, |thread, _| { + assert_eq!(thread.entries().len(), 2); + }); + + thread_view.read_with(cx, |view, cx| { + view.entry_view_state.read_with(cx, |entry_view_state, _| { + assert!( + entry_view_state + .entry(0) + .unwrap() + .message_editor() + .is_some() + ); + assert!(entry_view_state.entry(1).unwrap().has_content()); + }); + }); + + // Second user message + connection.set_next_prompt_updates(vec![acp::SessionUpdate::ToolCall(acp::ToolCall { + id: acp::ToolCallId("tool2".into()), + title: "Edit file 2".into(), + kind: acp::ToolKind::Edit, + status: acp::ToolCallStatus::Completed, + content: vec![acp::ToolCallContent::Diff { + diff: acp::Diff { + path: "/project/test2.txt".into(), + old_text: Some("old content 2".into()), + new_text: "new content 2".into(), + }, + }], + locations: vec![], + raw_input: None, + raw_output: None, + })]); + + thread + .update(cx, |thread, cx| thread.send_raw("Another one", cx)) + .await + .unwrap(); + cx.run_until_parked(); + + let second_user_message_id = thread.read_with(cx, |thread, _| { + assert_eq!(thread.entries().len(), 4); + let AgentThreadEntry::UserMessage(user_message) = &thread.entries()[2] else { + panic!(); + }; + user_message.id.clone().unwrap() + }); + + thread_view.read_with(cx, |view, cx| { + view.entry_view_state.read_with(cx, |entry_view_state, _| { + assert!( + entry_view_state + .entry(0) + .unwrap() + .message_editor() + .is_some() + ); + assert!(entry_view_state.entry(1).unwrap().has_content()); + assert!( + entry_view_state + .entry(2) + .unwrap() + .message_editor() + .is_some() + ); + assert!(entry_view_state.entry(3).unwrap().has_content()); + }); + }); + + // Rewind to first message + thread + .update(cx, |thread, cx| thread.rewind(second_user_message_id, cx)) + .await + .unwrap(); + + cx.run_until_parked(); + + thread.read_with(cx, |thread, _| { + assert_eq!(thread.entries().len(), 2); + }); + + thread_view.read_with(cx, |view, cx| { + view.entry_view_state.read_with(cx, |entry_view_state, _| { + assert!( + entry_view_state + .entry(0) + .unwrap() + .message_editor() + .is_some() + ); + assert!(entry_view_state.entry(1).unwrap().has_content()); + + // Old views should be dropped + assert!(entry_view_state.entry(2).is_none()); + assert!(entry_view_state.entry(3).is_none()); + }); + }); + } + + #[gpui::test] + async fn test_message_editing_cancel(cx: &mut TestAppContext) { + init_test(cx); + + let connection = StubAgentConnection::new(); + + connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk { + content: acp::ContentBlock::Text(acp::TextContent { + text: "Response".into(), + annotations: None, + }), + }]); + + let (thread_view, cx) = setup_thread_view(StubAgentServer::new(connection), cx).await; + add_to_workspace(thread_view.clone(), cx); + + let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone()); + message_editor.update_in(cx, |editor, window, cx| { + editor.set_text("Original message to edit", window, cx); + }); + thread_view.update_in(cx, |thread_view, window, cx| { + thread_view.send(window, cx); + }); + + cx.run_until_parked(); + + let user_message_editor = thread_view.read_with(cx, |view, cx| { + assert_eq!(view.editing_message, None); + + view.entry_view_state + .read(cx) + .entry(0) + .unwrap() + .message_editor() + .unwrap() + .clone() + }); + + // Focus + cx.focus(&user_message_editor); + thread_view.read_with(cx, |view, _cx| { 
+ assert_eq!(view.editing_message, Some(0)); + }); + + // Edit + user_message_editor.update_in(cx, |editor, window, cx| { + editor.set_text("Edited message content", window, cx); + }); + + // Cancel + user_message_editor.update_in(cx, |_editor, window, cx| { + window.dispatch_action(Box::new(editor::actions::Cancel), cx); + }); + + thread_view.read_with(cx, |view, _cx| { + assert_eq!(view.editing_message, None); + }); + + user_message_editor.read_with(cx, |editor, cx| { + assert_eq!(editor.text(cx), "Original message to edit"); + }); + } + + #[gpui::test] + async fn test_message_doesnt_send_if_empty(cx: &mut TestAppContext) { + init_test(cx); + + let connection = StubAgentConnection::new(); + + let (thread_view, cx) = setup_thread_view(StubAgentServer::new(connection), cx).await; + add_to_workspace(thread_view.clone(), cx); + + let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone()); + let mut events = cx.events(&message_editor); + message_editor.update_in(cx, |editor, window, cx| { + editor.set_text("", window, cx); + }); + + message_editor.update_in(cx, |_editor, window, cx| { + window.dispatch_action(Box::new(Chat), cx); + }); + cx.run_until_parked(); + // We shouldn't have received any messages + assert!(matches!( + events.try_next(), + Err(futures::channel::mpsc::TryRecvError { .. }) + )); + } + + #[gpui::test] + async fn test_message_editing_regenerate(cx: &mut TestAppContext) { + init_test(cx); + + let connection = StubAgentConnection::new(); + + connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk { + content: acp::ContentBlock::Text(acp::TextContent { + text: "Response".into(), + annotations: None, + }), + }]); + + let (thread_view, cx) = + setup_thread_view(StubAgentServer::new(connection.clone()), cx).await; + add_to_workspace(thread_view.clone(), cx); + + let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone()); + message_editor.update_in(cx, |editor, window, cx| { + editor.set_text("Original message to edit", window, cx); + }); + thread_view.update_in(cx, |thread_view, window, cx| { + thread_view.send(window, cx); + }); + + cx.run_until_parked(); + + let user_message_editor = thread_view.read_with(cx, |view, cx| { + assert_eq!(view.editing_message, None); + assert_eq!(view.thread().unwrap().read(cx).entries().len(), 2); + + view.entry_view_state + .read(cx) + .entry(0) + .unwrap() + .message_editor() + .unwrap() + .clone() + }); + + // Focus + cx.focus(&user_message_editor); + + // Edit + user_message_editor.update_in(cx, |editor, window, cx| { + editor.set_text("Edited message content", window, cx); + }); + + // Send + connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk { + content: acp::ContentBlock::Text(acp::TextContent { + text: "New Response".into(), + annotations: None, + }), + }]); + + user_message_editor.update_in(cx, |_editor, window, cx| { + window.dispatch_action(Box::new(Chat), cx); + }); + + cx.run_until_parked(); + + thread_view.read_with(cx, |view, cx| { + assert_eq!(view.editing_message, None); + + let entries = view.thread().unwrap().read(cx).entries(); + assert_eq!(entries.len(), 2); + assert_eq!( + entries[0].to_markdown(cx), + "## User\n\nEdited message content\n\n" + ); + assert_eq!( + entries[1].to_markdown(cx), + "## Assistant\n\nNew Response\n\n" + ); + + let new_editor = view.entry_view_state.read_with(cx, |state, _cx| { + assert!(!state.entry(1).unwrap().has_content()); + state.entry(0).unwrap().message_editor().unwrap().clone() + }); + + 
assert_eq!(new_editor.read(cx).text(cx), "Edited message content"); + }) + } + + #[gpui::test] + async fn test_message_editing_while_generating(cx: &mut TestAppContext) { + init_test(cx); + + let connection = StubAgentConnection::new(); + + let (thread_view, cx) = + setup_thread_view(StubAgentServer::new(connection.clone()), cx).await; + add_to_workspace(thread_view.clone(), cx); + + let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone()); + message_editor.update_in(cx, |editor, window, cx| { + editor.set_text("Original message to edit", window, cx); + }); + thread_view.update_in(cx, |thread_view, window, cx| { + thread_view.send(window, cx); + }); + + cx.run_until_parked(); + + let (user_message_editor, session_id) = thread_view.read_with(cx, |view, cx| { + let thread = view.thread().unwrap().read(cx); + assert_eq!(thread.entries().len(), 1); + + let editor = view + .entry_view_state + .read(cx) + .entry(0) + .unwrap() + .message_editor() + .unwrap() + .clone(); + + (editor, thread.session_id().clone()) + }); + + // Focus + cx.focus(&user_message_editor); + + thread_view.read_with(cx, |view, _cx| { + assert_eq!(view.editing_message, Some(0)); + }); + + // Edit + user_message_editor.update_in(cx, |editor, window, cx| { + editor.set_text("Edited message content", window, cx); + }); + + thread_view.read_with(cx, |view, _cx| { + assert_eq!(view.editing_message, Some(0)); + }); + + // Finish streaming response + cx.update(|_, cx| { + connection.send_update( + session_id.clone(), + acp::SessionUpdate::AgentMessageChunk { + content: acp::ContentBlock::Text(acp::TextContent { + text: "Response".into(), + annotations: None, + }), + }, + cx, + ); + connection.end_turn(session_id, acp::StopReason::EndTurn); + }); + + thread_view.read_with(cx, |view, _cx| { + assert_eq!(view.editing_message, Some(0)); + }); + + cx.run_until_parked(); + + // Should still be editing + cx.update(|window, cx| { + assert!(user_message_editor.focus_handle(cx).is_focused(window)); + assert_eq!(thread_view.read(cx).editing_message, Some(0)); + assert_eq!( + user_message_editor.read(cx).text(cx), + "Edited message content" + ); + }); + } + + #[gpui::test] + async fn test_interrupt(cx: &mut TestAppContext) { + init_test(cx); + + let connection = StubAgentConnection::new(); + + let (thread_view, cx) = + setup_thread_view(StubAgentServer::new(connection.clone()), cx).await; + add_to_workspace(thread_view.clone(), cx); + + let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone()); + message_editor.update_in(cx, |editor, window, cx| { + editor.set_text("Message 1", window, cx); + }); + thread_view.update_in(cx, |thread_view, window, cx| { + thread_view.send(window, cx); + }); + + let (thread, session_id) = thread_view.read_with(cx, |view, cx| { + let thread = view.thread().unwrap(); + + (thread.clone(), thread.read(cx).session_id().clone()) + }); + + cx.run_until_parked(); + + cx.update(|_, cx| { + connection.send_update( + session_id.clone(), + acp::SessionUpdate::AgentMessageChunk { + content: "Message 1 resp".into(), + }, + cx, + ); + }); + + cx.run_until_parked(); + + thread.read_with(cx, |thread, cx| { + assert_eq!( + thread.to_markdown(cx), + indoc::indoc! 
{" + ## User + + Message 1 + + ## Assistant + + Message 1 resp + + "} + ) + }); + + message_editor.update_in(cx, |editor, window, cx| { + editor.set_text("Message 2", window, cx); + }); + thread_view.update_in(cx, |thread_view, window, cx| { + thread_view.send(window, cx); + }); + + cx.update(|_, cx| { + // Simulate a response sent after beginning to cancel + connection.send_update( + session_id.clone(), + acp::SessionUpdate::AgentMessageChunk { + content: "onse".into(), + }, + cx, + ); + }); + + cx.run_until_parked(); + + // Last Message 1 response should appear before Message 2 + thread.read_with(cx, |thread, cx| { + assert_eq!( + thread.to_markdown(cx), + indoc::indoc! {" + ## User + + Message 1 + + ## Assistant + + Message 1 response + + ## User + + Message 2 + + "} + ) + }); + + cx.update(|_, cx| { + connection.send_update( + session_id.clone(), + acp::SessionUpdate::AgentMessageChunk { + content: "Message 2 response".into(), + }, + cx, + ); + connection.end_turn(session_id.clone(), acp::StopReason::EndTurn); + }); + + cx.run_until_parked(); + + thread.read_with(cx, |thread, cx| { + assert_eq!( + thread.to_markdown(cx), + indoc::indoc! {" + ## User + + Message 1 + + ## Assistant + + Message 1 response + + ## User + + Message 2 + + ## Assistant + + Message 2 response + + "} + ) }); } } diff --git a/crates/agent_ui/src/active_thread.rs b/crates/agent_ui/src/active_thread.rs index 71526c8fe14f0f023cfad260525c5366e66a24e5..2cad9132950787b9e5404e638275fb92147db5a7 100644 --- a/crates/agent_ui/src/active_thread.rs +++ b/crates/agent_ui/src/active_thread.rs @@ -434,7 +434,7 @@ fn render_markdown_code_block( .child(content) .child( Icon::new(IconName::ArrowUpRight) - .size(IconSize::XSmall) + .size(IconSize::Small) .color(Color::Ignored), ), ) @@ -491,7 +491,7 @@ fn render_markdown_code_block( .on_click({ let active_thread = active_thread.clone(); let parsed_markdown = parsed_markdown.clone(); - let code_block_range = metadata.content_range.clone(); + let code_block_range = metadata.content_range; move |_event, _window, cx| { active_thread.update(cx, |this, cx| { this.copied_code_block_ids.insert((message_id, ix)); @@ -532,7 +532,6 @@ fn render_markdown_code_block( "Expand Code" })) .on_click({ - let active_thread = active_thread.clone(); move |_event, _window, cx| { active_thread.update(cx, |this, cx| { this.toggle_codeblock_expanded(message_id, ix); @@ -780,13 +779,11 @@ impl ActiveThread { let list_state = ListState::new(0, ListAlignment::Bottom, px(2048.)); - let workspace_subscription = if let Some(workspace) = workspace.upgrade() { - Some(cx.observe_release(&workspace, |this, _, cx| { + let workspace_subscription = workspace.upgrade().map(|workspace| { + cx.observe_release(&workspace, |this, _, cx| { this.dismiss_notifications(cx); - })) - } else { - None - }; + }) + }); let mut this = Self { language_registry, @@ -916,7 +913,7 @@ impl ActiveThread { ) { let rendered = self .rendered_tool_uses - .entry(tool_use_id.clone()) + .entry(tool_use_id) .or_insert_with(|| RenderedToolUse { label: cx.new(|cx| { Markdown::new("".into(), Some(self.language_registry.clone()), None, cx) @@ -1044,12 +1041,12 @@ impl ActiveThread { ); } ThreadEvent::StreamedAssistantText(message_id, text) => { - if let Some(rendered_message) = self.rendered_messages_by_id.get_mut(&message_id) { + if let Some(rendered_message) = self.rendered_messages_by_id.get_mut(message_id) { rendered_message.append_text(text, cx); } } ThreadEvent::StreamedAssistantThinking(message_id, text) => { - if let Some(rendered_message) = 
self.rendered_messages_by_id.get_mut(&message_id) { + if let Some(rendered_message) = self.rendered_messages_by_id.get_mut(message_id) { rendered_message.append_thinking(text, cx); } } @@ -1072,8 +1069,8 @@ impl ActiveThread { } ThreadEvent::MessageEdited(message_id) => { self.clear_last_error(); - if let Some(index) = self.messages.iter().position(|id| id == message_id) { - if let Some(rendered_message) = self.thread.update(cx, |thread, cx| { + if let Some(index) = self.messages.iter().position(|id| id == message_id) + && let Some(rendered_message) = self.thread.update(cx, |thread, cx| { thread.message(*message_id).map(|message| { let mut rendered_message = RenderedMessage { language_registry: self.language_registry.clone(), @@ -1084,14 +1081,14 @@ impl ActiveThread { } rendered_message }) - }) { - self.list_state.splice(index..index + 1, 1); - self.rendered_messages_by_id - .insert(*message_id, rendered_message); - self.scroll_to_bottom(cx); - self.save_thread(cx); - cx.notify(); - } + }) + { + self.list_state.splice(index..index + 1, 1); + self.rendered_messages_by_id + .insert(*message_id, rendered_message); + self.scroll_to_bottom(cx); + self.save_thread(cx); + cx.notify(); } } ThreadEvent::MessageDeleted(message_id) => { @@ -1218,7 +1215,7 @@ impl ActiveThread { match AgentSettings::get_global(cx).notify_when_agent_waiting { NotifyWhenAgentWaiting::PrimaryScreen => { if let Some(primary) = cx.primary_display() { - self.pop_up(icon, caption.into(), title.clone(), window, primary, cx); + self.pop_up(icon, caption.into(), title, window, primary, cx); } } NotifyWhenAgentWaiting::AllScreens => { @@ -1272,62 +1269,61 @@ impl ActiveThread { }) }) .log_err() + && let Some(pop_up) = screen_window.entity(cx).log_err() { - if let Some(pop_up) = screen_window.entity(cx).log_err() { - self.notification_subscriptions - .entry(screen_window) - .or_insert_with(Vec::new) - .push(cx.subscribe_in(&pop_up, window, { - |this, _, event, window, cx| match event { - AgentNotificationEvent::Accepted => { - let handle = window.window_handle(); - cx.activate(true); - - let workspace_handle = this.workspace.clone(); - - // If there are multiple Zed windows, activate the correct one. - cx.defer(move |cx| { - handle - .update(cx, |_view, window, _cx| { - window.activate_window(); - - if let Some(workspace) = workspace_handle.upgrade() { - workspace.update(_cx, |workspace, cx| { - workspace.focus_panel::(window, cx); - }); - } - }) - .log_err(); - }); + self.notification_subscriptions + .entry(screen_window) + .or_insert_with(Vec::new) + .push(cx.subscribe_in(&pop_up, window, { + |this, _, event, window, cx| match event { + AgentNotificationEvent::Accepted => { + let handle = window.window_handle(); + cx.activate(true); + + let workspace_handle = this.workspace.clone(); + + // If there are multiple Zed windows, activate the correct one. + cx.defer(move |cx| { + handle + .update(cx, |_view, window, _cx| { + window.activate_window(); + + if let Some(workspace) = workspace_handle.upgrade() { + workspace.update(_cx, |workspace, cx| { + workspace.focus_panel::(window, cx); + }); + } + }) + .log_err(); + }); - this.dismiss_notifications(cx); - } - AgentNotificationEvent::Dismissed => { - this.dismiss_notifications(cx); - } + this.dismiss_notifications(cx); } - })); - - self.notifications.push(screen_window); - - // If the user manually refocuses the original window, dismiss the popup. 
- self.notification_subscriptions - .entry(screen_window) - .or_insert_with(Vec::new) - .push({ - let pop_up_weak = pop_up.downgrade(); - - cx.observe_window_activation(window, move |_, window, cx| { - if window.is_window_active() { - if let Some(pop_up) = pop_up_weak.upgrade() { - pop_up.update(cx, |_, cx| { - cx.emit(AgentNotificationEvent::Dismissed); - }); - } - } - }) - }); - } + AgentNotificationEvent::Dismissed => { + this.dismiss_notifications(cx); + } + } + })); + + self.notifications.push(screen_window); + + // If the user manually refocuses the original window, dismiss the popup. + self.notification_subscriptions + .entry(screen_window) + .or_insert_with(Vec::new) + .push({ + let pop_up_weak = pop_up.downgrade(); + + cx.observe_window_activation(window, move |_, window, cx| { + if window.is_window_active() + && let Some(pop_up) = pop_up_weak.upgrade() + { + pop_up.update(cx, |_, cx| { + cx.emit(AgentNotificationEvent::Dismissed); + }); + } + }) + }); } } @@ -1374,12 +1370,12 @@ impl ActiveThread { editor.focus_handle(cx).focus(window); editor.move_to_end(&editor::actions::MoveToEnd, window, cx); }); - let buffer_edited_subscription = cx.subscribe(&editor, |this, _, event, cx| match event { - EditorEvent::BufferEdited => { - this.update_editing_message_token_count(true, cx); - } - _ => {} - }); + let buffer_edited_subscription = + cx.subscribe(&editor, |this, _, event: &EditorEvent, cx| { + if event == &EditorEvent::BufferEdited { + this.update_editing_message_token_count(true, cx); + } + }); let context_picker_menu_handle = PopoverMenuHandle::default(); let context_strip = cx.new(|cx| { @@ -1766,7 +1762,7 @@ impl ActiveThread { .thread .read(cx) .message(message_id) - .map(|msg| msg.to_string()) + .map(|msg| msg.to_message_content()) .unwrap_or_default(); telemetry::event!( @@ -1896,8 +1892,9 @@ impl ActiveThread { (colors.editor_background, colors.panel_background) }; - let open_as_markdown = IconButton::new(("open-as-markdown", ix), IconName::DocumentText) - .icon_size(IconSize::XSmall) + let open_as_markdown = IconButton::new(("open-as-markdown", ix), IconName::FileMarkdown) + .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::Small) .icon_color(Color::Ignored) .tooltip(Tooltip::text("Open Thread as Markdown")) .on_click({ @@ -1911,8 +1908,9 @@ impl ActiveThread { } }); - let scroll_to_top = IconButton::new(("scroll_to_top", ix), IconName::ArrowUpAlt) - .icon_size(IconSize::XSmall) + let scroll_to_top = IconButton::new(("scroll_to_top", ix), IconName::ArrowUp) + .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::Small) .icon_color(Color::Ignored) .tooltip(Tooltip::text("Scroll To Top")) .on_click(cx.listener(move |this, _, _, cx| { @@ -1926,6 +1924,7 @@ impl ActiveThread { .py_2() .px(RESPONSE_PADDING_X) .mr_1() + .gap_1() .opacity(0.4) .hover(|style| style.opacity(1.)) .gap_1p5() @@ -1949,7 +1948,8 @@ impl ActiveThread { h_flex() .child( IconButton::new(("feedback-thumbs-up", ix), IconName::ThumbsUp) - .icon_size(IconSize::XSmall) + .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::Small) .icon_color(match feedback { ThreadFeedback::Positive => Color::Accent, ThreadFeedback::Negative => Color::Ignored, @@ -1966,7 +1966,8 @@ impl ActiveThread { ) .child( IconButton::new(("feedback-thumbs-down", ix), IconName::ThumbsDown) - .icon_size(IconSize::XSmall) + .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::Small) .icon_color(match feedback { ThreadFeedback::Positive => Color::Ignored, ThreadFeedback::Negative => Color::Accent, @@ -1999,7 +2000,8 
@@ impl ActiveThread { h_flex() .child( IconButton::new(("feedback-thumbs-up", ix), IconName::ThumbsUp) - .icon_size(IconSize::XSmall) + .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::Small) .icon_color(Color::Ignored) .tooltip(Tooltip::text("Helpful Response")) .on_click(cx.listener(move |this, _, window, cx| { @@ -2013,7 +2015,8 @@ impl ActiveThread { ) .child( IconButton::new(("feedback-thumbs-down", ix), IconName::ThumbsDown) - .icon_size(IconSize::XSmall) + .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::Small) .icon_color(Color::Ignored) .tooltip(Tooltip::text("Not Helpful")) .on_click(cx.listener(move |this, _, window, cx| { @@ -2106,7 +2109,7 @@ impl ActiveThread { .gap_1() .children(message_content) .when_some(editing_message_state, |this, state| { - let focus_handle = state.editor.focus_handle(cx).clone(); + let focus_handle = state.editor.focus_handle(cx); this.child( h_flex() @@ -2167,7 +2170,6 @@ impl ActiveThread { .icon_color(Color::Muted) .icon_size(IconSize::Small) .tooltip({ - let focus_handle = focus_handle.clone(); move |window, cx| { Tooltip::for_action_in( "Regenerate", @@ -2240,9 +2242,7 @@ impl ActiveThread { let after_editing_message = self .editing_message .as_ref() - .map_or(false, |(editing_message_id, _)| { - message_id > *editing_message_id - }); + .is_some_and(|(editing_message_id, _)| message_id > *editing_message_id); let backdrop = div() .id(("backdrop", ix)) @@ -2262,13 +2262,12 @@ impl ActiveThread { let mut error = None; if let Some(last_restore_checkpoint) = self.thread.read(cx).last_restore_checkpoint() + && last_restore_checkpoint.message_id() == message_id { - if last_restore_checkpoint.message_id() == message_id { - match last_restore_checkpoint { - LastRestoreCheckpoint::Pending { .. } => is_pending = true, - LastRestoreCheckpoint::Error { error: err, .. } => { - error = Some(err.clone()); - } + match last_restore_checkpoint { + LastRestoreCheckpoint::Pending { .. } => is_pending = true, + LastRestoreCheckpoint::Error { error: err, .. 
} => { + error = Some(err.clone()); } } } @@ -2309,7 +2308,7 @@ impl ActiveThread { .into_any_element() } else if let Some(error) = error { restore_checkpoint_button - .tooltip(Tooltip::text(error.to_string())) + .tooltip(Tooltip::text(error)) .into_any_element() } else { restore_checkpoint_button.into_any_element() @@ -2350,7 +2349,6 @@ impl ActiveThread { this.submit_feedback_message(message_id, cx); cx.notify(); })) - .on_action(cx.listener(Self::confirm_editing_message)) .mb_2() .mx_4() .p_2() @@ -2466,7 +2464,7 @@ impl ActiveThread { message_id, index, content.clone(), - &scroll_handle, + scroll_handle, Some(index) == pending_thinking_segment_index, window, cx, @@ -2590,7 +2588,7 @@ impl ActiveThread { .id(("message-container", ix)) .py_1() .px_2p5() - .child(Banner::new().severity(ui::Severity::Warning).child(message)) + .child(Banner::new().severity(Severity::Warning).child(message)) } fn render_message_thinking_segment( @@ -2750,7 +2748,7 @@ impl ActiveThread { h_flex() .gap_1p5() .child( - Icon::new(IconName::LightBulb) + Icon::new(IconName::ToolThink) .size(IconSize::XSmall) .color(Color::Muted), ) @@ -3362,7 +3360,7 @@ impl ActiveThread { .mr_0p5(), ) .child( - IconButton::new("open-prompt-library", IconName::ArrowUpRightAlt) + IconButton::new("open-prompt-library", IconName::ArrowUpRight) .shape(ui::IconButtonShape::Square) .icon_size(IconSize::XSmall) .icon_color(Color::Ignored) @@ -3397,7 +3395,7 @@ impl ActiveThread { .mr_0p5(), ) .child( - IconButton::new("open-rule", IconName::ArrowUpRightAlt) + IconButton::new("open-rule", IconName::ArrowUpRight) .shape(ui::IconButtonShape::Square) .icon_size(IconSize::XSmall) .icon_color(Color::Ignored) @@ -4013,7 +4011,7 @@ mod tests { cx.run_until_parked(); - // Verify that the previous completion was cancelled + // Verify that the previous completion was canceled assert_eq!(cancellation_events.lock().unwrap().len(), 1); // Verify that a new request was started after cancellation diff --git a/crates/agent_ui/src/agent_configuration.rs b/crates/agent_ui/src/agent_configuration.rs index 02c15b7e4164c298a2c10b854824f9a6f14b521b..00e48efdacf54ebb108ceb7ae6bb85f7c49bba8f 100644 --- a/crates/agent_ui/src/agent_configuration.rs +++ b/crates/agent_ui/src/agent_configuration.rs @@ -96,7 +96,7 @@ impl AgentConfiguration { let mut expanded_provider_configurations = HashMap::default(); if LanguageModelRegistry::read_global(cx) .provider(&ZED_CLOUD_PROVIDER_ID) - .map_or(false, |cloud_provider| cloud_provider.must_accept_terms(cx)) + .is_some_and(|cloud_provider| cloud_provider.must_accept_terms(cx)) { expanded_provider_configurations.insert(ZED_CLOUD_PROVIDER_ID, true); } @@ -137,7 +137,11 @@ impl AgentConfiguration { window: &mut Window, cx: &mut Context, ) { - let configuration_view = provider.configuration_view(window, cx); + let configuration_view = provider.configuration_view( + language_model::ConfigurationViewTargetAgent::ZedAgent, + window, + cx, + ); self.configuration_views_by_provider .insert(provider.id(), configuration_view); } @@ -161,8 +165,8 @@ impl AgentConfiguration { provider: &Arc, cx: &mut Context, ) -> impl IntoElement + use<> { - let provider_id = provider.id().0.clone(); - let provider_name = provider.name().0.clone(); + let provider_id = provider.id().0; + let provider_name = provider.name().0; let provider_id_string = SharedString::from(format!("provider-disclosure-{provider_id}")); let configuration_view = self @@ -188,7 +192,7 @@ impl AgentConfiguration { let is_signed_in = self .workspace .read_with(cx, |workspace, _| 
{ - workspace.client().status().borrow().is_connected() + !workspace.client().status().borrow().is_signed_out() }) .unwrap_or(false); @@ -265,7 +269,7 @@ impl AgentConfiguration { .closed_icon(IconName::ChevronDown), ) .on_click(cx.listener({ - let provider_id = provider.id().clone(); + let provider_id = provider.id(); move |this, _event, _window, _cx| { let is_expanded = this .expanded_provider_configurations @@ -300,6 +304,7 @@ impl AgentConfiguration { ) .child( div() + .w_full() .px_2() .when(is_expanded, |parent| match configuration_view { Some(configuration_view) => parent.child(configuration_view), @@ -465,7 +470,7 @@ impl AgentConfiguration { "modifier-send", "Use modifier to submit a message", Some( - "Make a modifier (cmd-enter on macOS, ctrl-enter on Linux) required to send messages.".into(), + "Make a modifier (cmd-enter on macOS, ctrl-enter on Linux or Windows) required to send messages.".into(), ), use_modifier_to_send, move |state, _window, cx| { @@ -573,7 +578,7 @@ impl AgentConfiguration { .style(ButtonStyle::Filled) .layer(ElevationIndex::ModalSurface) .full_width() - .icon(IconName::Hammer) + .icon(IconName::ToolHammer) .icon_size(IconSize::Small) .icon_position(IconPosition::Start) .on_click(|_event, window, cx| { @@ -660,7 +665,7 @@ impl AgentConfiguration { .size(IconSize::XSmall) .color(Color::Accent) .with_animation( - SharedString::from(format!("{}-starting", context_server_id.0.clone(),)), + SharedString::from(format!("{}-starting", context_server_id.0,)), Animation::new(Duration::from_secs(3)).repeat(), |icon, delta| icon.transform(Transformation::rotate(percentage(delta))), ) @@ -860,7 +865,6 @@ impl AgentConfiguration { .on_click({ let context_server_manager = self.context_server_store.clone(); - let context_server_id = context_server_id.clone(); let fs = self.fs.clone(); move |state, _window, cx| { @@ -953,7 +957,7 @@ impl AgentConfiguration { } parent.child(v_flex().py_1p5().px_1().gap_1().children( - tools.into_iter().enumerate().map(|(ix, tool)| { + tools.iter().enumerate().map(|(ix, tool)| { h_flex() .id(("tool-item", ix)) .px_1() @@ -1035,7 +1039,6 @@ fn extension_only_provides_context_server(manifest: &ExtensionManifest) -> bool && manifest.grammars.is_empty() && manifest.language_servers.is_empty() && manifest.slash_commands.is_empty() - && manifest.indexed_docs_providers.is_empty() && manifest.snippets.is_none() && manifest.debug_locators.is_empty() } @@ -1071,7 +1074,6 @@ fn show_unable_to_uninstall_extension_with_context_server( cx, move |this, _cx| { let workspace_handle = workspace_handle.clone(); - let context_server_id = context_server_id.clone(); this.icon(ToastIcon::new(IconName::Warning).color(Color::Warning)) .dismiss_button(true) diff --git a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs index 401a6334886e18ef2e53bbd5b68392597d0db1e9..182831f488870997d175cce0ad7e1c94e392f1ea 100644 --- a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs +++ b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs @@ -7,10 +7,12 @@ use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Render, T use language_model::LanguageModelRegistry; use language_models::{ AllLanguageModelSettings, OpenAiCompatibleSettingsContent, - provider::open_ai_compatible::AvailableModel, + provider::open_ai_compatible::{AvailableModel, ModelCapabilities}, }; use settings::update_settings_file; -use ui::{Banner, KeyBinding, Modal, ModalFooter, 
ModalHeader, Section, prelude::*}; +use ui::{ + Banner, Checkbox, KeyBinding, Modal, ModalFooter, ModalHeader, Section, ToggleState, prelude::*, +}; use ui_input::SingleLineInput; use workspace::{ModalView, Workspace}; @@ -69,11 +71,19 @@ impl AddLlmProviderInput { } } +struct ModelCapabilityToggles { + pub supports_tools: ToggleState, + pub supports_images: ToggleState, + pub supports_parallel_tool_calls: ToggleState, + pub supports_prompt_cache_key: ToggleState, +} + struct ModelInput { name: Entity, max_completion_tokens: Entity, max_output_tokens: Entity, max_tokens: Entity, + capabilities: ModelCapabilityToggles, } impl ModelInput { @@ -100,11 +110,23 @@ impl ModelInput { cx, ); let max_tokens = single_line_input("Max Tokens", "Max Tokens", Some("200000"), window, cx); + let ModelCapabilities { + tools, + images, + parallel_tool_calls, + prompt_cache_key, + } = ModelCapabilities::default(); Self { name: model_name, max_completion_tokens, max_output_tokens, max_tokens, + capabilities: ModelCapabilityToggles { + supports_tools: tools.into(), + supports_images: images.into(), + supports_parallel_tool_calls: parallel_tool_calls.into(), + supports_prompt_cache_key: prompt_cache_key.into(), + }, } } @@ -136,6 +158,12 @@ impl ModelInput { .text(cx) .parse::() .map_err(|_| SharedString::from("Max Tokens must be a number"))?, + capabilities: ModelCapabilities { + tools: self.capabilities.supports_tools.selected(), + images: self.capabilities.supports_images.selected(), + parallel_tool_calls: self.capabilities.supports_parallel_tool_calls.selected(), + prompt_cache_key: self.capabilities.supports_prompt_cache_key.selected(), + }, }) } } @@ -322,6 +350,55 @@ impl AddLlmProviderModal { .child(model.max_output_tokens.clone()), ) .child(model.max_tokens.clone()) + .child( + v_flex() + .gap_1() + .child( + Checkbox::new(("supports-tools", ix), model.capabilities.supports_tools) + .label("Supports tools") + .on_click(cx.listener(move |this, checked, _window, cx| { + this.input.models[ix].capabilities.supports_tools = *checked; + cx.notify(); + })), + ) + .child( + Checkbox::new(("supports-images", ix), model.capabilities.supports_images) + .label("Supports images") + .on_click(cx.listener(move |this, checked, _window, cx| { + this.input.models[ix].capabilities.supports_images = *checked; + cx.notify(); + })), + ) + .child( + Checkbox::new( + ("supports-parallel-tool-calls", ix), + model.capabilities.supports_parallel_tool_calls, + ) + .label("Supports parallel_tool_calls") + .on_click(cx.listener( + move |this, checked, _window, cx| { + this.input.models[ix] + .capabilities + .supports_parallel_tool_calls = *checked; + cx.notify(); + }, + )), + ) + .child( + Checkbox::new( + ("supports-prompt-cache-key", ix), + model.capabilities.supports_prompt_cache_key, + ) + .label("Supports prompt_cache_key") + .on_click(cx.listener( + move |this, checked, _window, cx| { + this.input.models[ix].capabilities.supports_prompt_cache_key = + *checked; + cx.notify(); + }, + )), + ), + ) .when(has_more_than_one_model, |this| { this.child( Button::new(("remove-model", ix), "Remove Model") @@ -377,7 +454,7 @@ impl Render for AddLlmProviderModal { this.section( Section::new().child( Banner::new() - .severity(ui::Severity::Warning) + .severity(Severity::Warning) .child(div().text_xs().child(error)), ), ) @@ -562,6 +639,93 @@ mod tests { ); } + #[gpui::test] + async fn test_model_input_default_capabilities(cx: &mut TestAppContext) { + let cx = setup_test(cx).await; + + cx.update(|window, cx| { + let model_input = 
ModelInput::new(window, cx); + model_input.name.update(cx, |input, cx| { + input.editor().update(cx, |editor, cx| { + editor.set_text("somemodel", window, cx); + }); + }); + assert_eq!( + model_input.capabilities.supports_tools, + ToggleState::Selected + ); + assert_eq!( + model_input.capabilities.supports_images, + ToggleState::Unselected + ); + assert_eq!( + model_input.capabilities.supports_parallel_tool_calls, + ToggleState::Unselected + ); + assert_eq!( + model_input.capabilities.supports_prompt_cache_key, + ToggleState::Unselected + ); + + let parsed_model = model_input.parse(cx).unwrap(); + assert!(parsed_model.capabilities.tools); + assert!(!parsed_model.capabilities.images); + assert!(!parsed_model.capabilities.parallel_tool_calls); + assert!(!parsed_model.capabilities.prompt_cache_key); + }); + } + + #[gpui::test] + async fn test_model_input_deselected_capabilities(cx: &mut TestAppContext) { + let cx = setup_test(cx).await; + + cx.update(|window, cx| { + let mut model_input = ModelInput::new(window, cx); + model_input.name.update(cx, |input, cx| { + input.editor().update(cx, |editor, cx| { + editor.set_text("somemodel", window, cx); + }); + }); + + model_input.capabilities.supports_tools = ToggleState::Unselected; + model_input.capabilities.supports_images = ToggleState::Unselected; + model_input.capabilities.supports_parallel_tool_calls = ToggleState::Unselected; + model_input.capabilities.supports_prompt_cache_key = ToggleState::Unselected; + + let parsed_model = model_input.parse(cx).unwrap(); + assert!(!parsed_model.capabilities.tools); + assert!(!parsed_model.capabilities.images); + assert!(!parsed_model.capabilities.parallel_tool_calls); + assert!(!parsed_model.capabilities.prompt_cache_key); + }); + } + + #[gpui::test] + async fn test_model_input_with_name_and_capabilities(cx: &mut TestAppContext) { + let cx = setup_test(cx).await; + + cx.update(|window, cx| { + let mut model_input = ModelInput::new(window, cx); + model_input.name.update(cx, |input, cx| { + input.editor().update(cx, |editor, cx| { + editor.set_text("somemodel", window, cx); + }); + }); + + model_input.capabilities.supports_tools = ToggleState::Selected; + model_input.capabilities.supports_images = ToggleState::Unselected; + model_input.capabilities.supports_parallel_tool_calls = ToggleState::Selected; + model_input.capabilities.supports_prompt_cache_key = ToggleState::Unselected; + + let parsed_model = model_input.parse(cx).unwrap(); + assert_eq!(parsed_model.name, "somemodel"); + assert!(parsed_model.capabilities.tools); + assert!(!parsed_model.capabilities.images); + assert!(parsed_model.capabilities.parallel_tool_calls); + assert!(!parsed_model.capabilities.prompt_cache_key); + }); + } + async fn setup_test(cx: &mut TestAppContext) -> &mut VisualTestContext { cx.update(|cx| { let store = SettingsStore::test(cx); diff --git a/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs b/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs index 06d035d836853068c8ed402ee0e85ff85d9af6b2..c898a5acb5b8d0a45780efb383ece19b4cfe289d 100644 --- a/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs +++ b/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs @@ -163,10 +163,10 @@ impl ConfigurationSource { .read(cx) .text(cx); let settings = serde_json_lenient::from_str::(&text)?; - if let Some(settings_validator) = settings_validator { - if let Err(error) = settings_validator.validate(&settings) { - return 
Err(anyhow::anyhow!(error.to_string())); - } + if let Some(settings_validator) = settings_validator + && let Err(error) = settings_validator.validate(&settings) + { + return Err(anyhow::anyhow!(error.to_string())); } Ok(( id.clone(), @@ -261,7 +261,6 @@ impl ConfigureContextServerModal { _cx: &mut Context, ) { workspace.register_action({ - let language_registry = language_registry.clone(); move |_workspace, _: &AddContextServer, window, cx| { let workspace_handle = cx.weak_entity(); let language_registry = language_registry.clone(); @@ -438,7 +437,7 @@ impl ConfigureContextServerModal { format!("{} configured successfully.", id.0), cx, |this, _cx| { - this.icon(ToastIcon::new(IconName::Hammer).color(Color::Muted)) + this.icon(ToastIcon::new(IconName::ToolHammer).color(Color::Muted)) .action("Dismiss", |_, _| {}) }, ); @@ -487,7 +486,7 @@ impl ConfigureContextServerModal { } fn render_modal_description(&self, window: &mut Window, cx: &mut Context) -> AnyElement { - const MODAL_DESCRIPTION: &'static str = "Visit the MCP server configuration docs to find all necessary arguments and environment variables."; + const MODAL_DESCRIPTION: &str = "Visit the MCP server configuration docs to find all necessary arguments and environment variables."; if let ConfigurationSource::Extension { installation_instructions: Some(installation_instructions), @@ -567,7 +566,7 @@ impl ConfigureContextServerModal { Button::new("open-repository", "Open Repository") .icon(IconName::ArrowUpRight) .icon_color(Color::Muted) - .icon_size(IconSize::XSmall) + .icon_size(IconSize::Small) .tooltip({ let repository_url = repository_url.clone(); move |window, cx| { @@ -716,24 +715,24 @@ fn wait_for_context_server( project::context_server_store::Event::ServerStatusChanged { server_id, status } => { match status { ContextServerStatus::Running => { - if server_id == &context_server_id { - if let Some(tx) = tx.lock().unwrap().take() { - let _ = tx.send(Ok(())); - } + if server_id == &context_server_id + && let Some(tx) = tx.lock().unwrap().take() + { + let _ = tx.send(Ok(())); } } ContextServerStatus::Stopped => { - if server_id == &context_server_id { - if let Some(tx) = tx.lock().unwrap().take() { - let _ = tx.send(Err("Context server stopped running".into())); - } + if server_id == &context_server_id + && let Some(tx) = tx.lock().unwrap().take() + { + let _ = tx.send(Err("Context server stopped running".into())); } } ContextServerStatus::Error(error) => { - if server_id == &context_server_id { - if let Some(tx) = tx.lock().unwrap().take() { - let _ = tx.send(Err(error.clone())); - } + if server_id == &context_server_id + && let Some(tx) = tx.lock().unwrap().take() + { + let _ = tx.send(Err(error.clone())); } } _ => {} diff --git a/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs b/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs index 5d44bb2d9218cc072cf3d245638b29ff98111861..7fcf76d1cbec64ab99f3f97998600d6d9889207a 100644 --- a/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs +++ b/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs @@ -464,7 +464,7 @@ impl ManageProfilesModal { }, )) .child(ListSeparator) - .child(h_flex().p_2().child(mode.name_editor.clone())) + .child(h_flex().p_2().child(mode.name_editor)) } fn render_view_profile( @@ -594,7 +594,7 @@ impl ManageProfilesModal { .inset(true) .spacing(ListItemSpacing::Sparse) .start_slot( - Icon::new(IconName::Hammer) + Icon::new(IconName::ToolHammer) .size(IconSize::Small) .color(Color::Muted), ) @@ -763,7 +763,7 
@@ impl Render for ManageProfilesModal { .pb_1() .child(ProfileModalHeader::new( format!("{profile_name} — Configure MCP Tools"), - Some(IconName::Hammer), + Some(IconName::ToolHammer), )) .child(ListSeparator) .child(tool_picker.clone()) diff --git a/crates/agent_ui/src/agent_configuration/tool_picker.rs b/crates/agent_ui/src/agent_configuration/tool_picker.rs index 8f1e0d71c0bd8ef56a71c1a88db1bf67929b060c..25947a1e589d96f1f4bfc64a04f43f48153420b9 100644 --- a/crates/agent_ui/src/agent_configuration/tool_picker.rs +++ b/crates/agent_ui/src/agent_configuration/tool_picker.rs @@ -191,10 +191,10 @@ impl PickerDelegate for ToolPickerDelegate { BTreeMap::default(); for item in all_items.iter() { - if let PickerItem::Tool { server_id, name } = item.clone() { - if name.contains(&query) { - tools_by_provider.entry(server_id).or_default().push(name); - } + if let PickerItem::Tool { server_id, name } = item.clone() + && name.contains(&query) + { + tools_by_provider.entry(server_id).or_default().push(name); } } diff --git a/crates/agent_ui/src/agent_diff.rs b/crates/agent_ui/src/agent_diff.rs index e1ceaf761dbb1e818235610ba232cc9a1d150132..e07424987c48ef917b9470adbdfdbc6812957967 100644 --- a/crates/agent_ui/src/agent_diff.rs +++ b/crates/agent_ui/src/agent_diff.rs @@ -1,9 +1,9 @@ use crate::{Keep, KeepAll, OpenAgentDiff, Reject, RejectAll}; use acp_thread::{AcpThread, AcpThreadEvent}; +use action_log::ActionLog; use agent::{Thread, ThreadEvent, ThreadSummary}; use agent_settings::AgentSettings; use anyhow::Result; -use assistant_tool::ActionLog; use buffer_diff::DiffHunkStatus; use collections::{HashMap, HashSet}; use editor::{ @@ -185,7 +185,7 @@ impl AgentDiffPane { let focus_handle = cx.focus_handle(); let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); - let project = thread.project(cx).clone(); + let project = thread.project(cx); let editor = cx.new(|cx| { let mut editor = Editor::for_multibuffer(multibuffer.clone(), Some(project.clone()), window, cx); @@ -196,27 +196,24 @@ impl AgentDiffPane { editor }); - let action_log = thread.action_log(cx).clone(); + let action_log = thread.action_log(cx); let mut this = Self { - _subscriptions: [ - Some( - cx.observe_in(&action_log, window, |this, _action_log, window, cx| { - this.update_excerpts(window, cx) - }), - ), + _subscriptions: vec![ + cx.observe_in(&action_log, window, |this, _action_log, window, cx| { + this.update_excerpts(window, cx) + }), match &thread { - AgentDiffThread::Native(thread) => { - Some(cx.subscribe(&thread, |this, _thread, event, cx| { - this.handle_thread_event(event, cx) - })) - } - AgentDiffThread::AcpThread(_) => None, + AgentDiffThread::Native(thread) => cx + .subscribe(thread, |this, _thread, event, cx| { + this.handle_native_thread_event(event, cx) + }), + AgentDiffThread::AcpThread(thread) => cx + .subscribe(thread, |this, _thread, event, cx| { + this.handle_acp_thread_event(event, cx) + }), }, - ] - .into_iter() - .flatten() - .collect(), + ], title: SharedString::default(), multibuffer, editor, @@ -288,7 +285,7 @@ impl AgentDiffPane { && buffer .read(cx) .file() - .map_or(false, |file| file.disk_state() == DiskState::Deleted) + .is_some_and(|file| file.disk_state() == DiskState::Deleted) { editor.fold_buffer(snapshot.text.remote_id(), cx) } @@ -324,10 +321,15 @@ impl AgentDiffPane { } } - fn handle_thread_event(&mut self, event: &ThreadEvent, cx: &mut Context) { - match event { - ThreadEvent::SummaryGenerated => self.update_title(cx), - _ => {} + fn handle_native_thread_event(&mut self, event: 
&ThreadEvent, cx: &mut Context) { + if let ThreadEvent::SummaryGenerated = event { + self.update_title(cx) + } + } + + fn handle_acp_thread_event(&mut self, event: &AcpThreadEvent, cx: &mut Context) { + if let AcpThreadEvent::TitleUpdated = event { + self.update_title(cx) } } @@ -398,7 +400,7 @@ fn keep_edits_in_selection( .disjoint_anchor_ranges() .collect::>(); - keep_edits_in_ranges(editor, buffer_snapshot, &thread, ranges, window, cx) + keep_edits_in_ranges(editor, buffer_snapshot, thread, ranges, window, cx) } fn reject_edits_in_selection( @@ -412,7 +414,7 @@ fn reject_edits_in_selection( .selections .disjoint_anchor_ranges() .collect::>(); - reject_edits_in_ranges(editor, buffer_snapshot, &thread, ranges, window, cx) + reject_edits_in_ranges(editor, buffer_snapshot, thread, ranges, window, cx) } fn keep_edits_in_ranges( @@ -503,8 +505,7 @@ fn update_editor_selection( &[last_kept_hunk_end..editor::Anchor::max()], buffer_snapshot, ) - .skip(1) - .next() + .nth(1) }) .or_else(|| { let first_kept_hunk = diff_hunks.first()?; @@ -1001,7 +1002,7 @@ impl AgentDiffToolbar { return; }; - *state = agent_diff.read(cx).editor_state(&editor); + *state = agent_diff.read(cx).editor_state(editor); self.update_location(cx); cx.notify(); } @@ -1044,23 +1045,23 @@ impl ToolbarItemView for AgentDiffToolbar { return self.location(cx); } - if let Some(editor) = item.act_as::(cx) { - if editor.read(cx).mode().is_full() { - let agent_diff = AgentDiff::global(cx); + if let Some(editor) = item.act_as::(cx) + && editor.read(cx).mode().is_full() + { + let agent_diff = AgentDiff::global(cx); - self.active_item = Some(AgentDiffToolbarItem::Editor { - editor: editor.downgrade(), - state: agent_diff.read(cx).editor_state(&editor.downgrade()), - _diff_subscription: cx.observe(&agent_diff, Self::handle_diff_notify), - }); + self.active_item = Some(AgentDiffToolbarItem::Editor { + editor: editor.downgrade(), + state: agent_diff.read(cx).editor_state(&editor.downgrade()), + _diff_subscription: cx.observe(&agent_diff, Self::handle_diff_notify), + }); - return self.location(cx); - } + return self.location(cx); } } self.active_item = None; - return self.location(cx); + self.location(cx) } fn pane_focus_update( @@ -1311,7 +1312,7 @@ impl AgentDiff { let entity = cx.new(|_cx| Self::default()); let global = AgentDiffGlobal(entity.clone()); cx.set_global(global); - entity.clone() + entity }) } @@ -1333,7 +1334,7 @@ impl AgentDiff { window: &mut Window, cx: &mut Context, ) { - let action_log = thread.action_log(cx).clone(); + let action_log = thread.action_log(cx); let action_log_subscription = cx.observe_in(&action_log, window, { let workspace = workspace.clone(); @@ -1343,13 +1344,13 @@ impl AgentDiff { }); let thread_subscription = match &thread { - AgentDiffThread::Native(thread) => cx.subscribe_in(&thread, window, { + AgentDiffThread::Native(thread) => cx.subscribe_in(thread, window, { let workspace = workspace.clone(); move |this, _thread, event, window, cx| { this.handle_native_thread_event(&workspace, event, window, cx) } }), - AgentDiffThread::AcpThread(thread) => cx.subscribe_in(&thread, window, { + AgentDiffThread::AcpThread(thread) => cx.subscribe_in(thread, window, { let workspace = workspace.clone(); move |this, thread, event, window, cx| { this.handle_acp_thread_event(&workspace, thread, event, window, cx) @@ -1357,11 +1358,11 @@ impl AgentDiff { }), }; - if let Some(workspace_thread) = self.workspace_threads.get_mut(&workspace) { + if let Some(workspace_thread) = self.workspace_threads.get_mut(workspace) { // 
replace thread and action log subscription, but keep editors workspace_thread.thread = thread.downgrade(); workspace_thread._thread_subscriptions = (action_log_subscription, thread_subscription); - self.update_reviewing_editors(&workspace, window, cx); + self.update_reviewing_editors(workspace, window, cx); return; } @@ -1506,7 +1507,7 @@ impl AgentDiff { .read(cx) .entries() .last() - .map_or(false, |entry| entry.diffs().next().is_some()) + .is_some_and(|entry| entry.diffs().next().is_some()) { self.update_reviewing_editors(workspace, window, cx); } @@ -1516,15 +1517,19 @@ impl AgentDiff { .read(cx) .entries() .get(*ix) - .map_or(false, |entry| entry.diffs().next().is_some()) + .is_some_and(|entry| entry.diffs().next().is_some()) { self.update_reviewing_editors(workspace, window, cx); } } - AcpThreadEvent::Stopped + AcpThreadEvent::Stopped | AcpThreadEvent::Error | AcpThreadEvent::LoadError(_) => { + self.update_reviewing_editors(workspace, window, cx); + } + AcpThreadEvent::TitleUpdated + | AcpThreadEvent::TokenUsageUpdated + | AcpThreadEvent::EntriesRemoved(_) | AcpThreadEvent::ToolAuthorizationRequired - | AcpThreadEvent::Error - | AcpThreadEvent::ServerExited(_) => {} + | AcpThreadEvent::Retry(_) => {} } } @@ -1535,21 +1540,11 @@ impl AgentDiff { window: &mut Window, cx: &mut Context, ) { - match event { - workspace::Event::ItemAdded { item } => { - if let Some(editor) = item.downcast::() { - if let Some(buffer) = Self::full_editor_buffer(editor.read(cx), cx) { - self.register_editor( - workspace.downgrade(), - buffer.clone(), - editor, - window, - cx, - ); - } - } - } - _ => {} + if let workspace::Event::ItemAdded { item } = event + && let Some(editor) = item.downcast::() + && let Some(buffer) = Self::full_editor_buffer(editor.read(cx), cx) + { + self.register_editor(workspace.downgrade(), buffer, editor, window, cx); } } @@ -1648,7 +1643,7 @@ impl AgentDiff { continue; }; - for (weak_editor, _) in buffer_editors { + for weak_editor in buffer_editors.keys() { let Some(editor) = weak_editor.upgrade() else { continue; }; @@ -1676,7 +1671,7 @@ impl AgentDiff { editor.register_addon(EditorAgentDiffAddon); }); } else { - unaffected.remove(&weak_editor); + unaffected.remove(weak_editor); } if new_state == EditorState::Reviewing && previous_state != Some(new_state) { @@ -1709,7 +1704,7 @@ impl AgentDiff { .read_with(cx, |editor, _cx| editor.workspace()) .ok() .flatten() - .map_or(false, |editor_workspace| { + .is_some_and(|editor_workspace| { editor_workspace.entity_id() == workspace.entity_id() }); @@ -1729,7 +1724,7 @@ impl AgentDiff { fn editor_state(&self, editor: &WeakEntity) -> EditorState { self.reviewing_editors - .get(&editor) + .get(editor) .cloned() .unwrap_or(EditorState::Idle) } @@ -1849,26 +1844,26 @@ impl AgentDiff { let thread = thread.upgrade()?; - if let PostReviewState::AllReviewed = review(&editor, &thread, window, cx) { - if let Some(curr_buffer) = editor.read(cx).buffer().read(cx).as_singleton() { - let changed_buffers = thread.action_log(cx).read(cx).changed_buffers(cx); - - let mut keys = changed_buffers.keys().cycle(); - keys.find(|k| *k == &curr_buffer); - let next_project_path = keys - .next() - .filter(|k| *k != &curr_buffer) - .and_then(|after| after.read(cx).project_path(cx)); - - if let Some(path) = next_project_path { - let task = workspace.open_path(path, None, true, window, cx); - let task = cx.spawn(async move |_, _cx| task.await.map(|_| ())); - return Some(task); - } + if let PostReviewState::AllReviewed = review(&editor, &thread, window, cx) + && let 
Some(curr_buffer) = editor.read(cx).buffer().read(cx).as_singleton() + { + let changed_buffers = thread.action_log(cx).read(cx).changed_buffers(cx); + + let mut keys = changed_buffers.keys().cycle(); + keys.find(|k| *k == &curr_buffer); + let next_project_path = keys + .next() + .filter(|k| *k != &curr_buffer) + .and_then(|after| after.read(cx).project_path(cx)); + + if let Some(path) = next_project_path { + let task = workspace.open_path(path, None, true, window, cx); + let task = cx.spawn(async move |_, _cx| task.await.map(|_| ())); + return Some(task); } } - return Some(Task::ready(Ok(()))); + Some(Task::ready(Ok(()))) } } diff --git a/crates/agent_ui/src/agent_model_selector.rs b/crates/agent_ui/src/agent_model_selector.rs index b989e7bf1e9147c7f6beb90b5054120cef7b818f..3de1027d91f6d613e9f3aa723b345e5d5f17ee6f 100644 --- a/crates/agent_ui/src/agent_model_selector.rs +++ b/crates/agent_ui/src/agent_model_selector.rs @@ -66,10 +66,8 @@ impl AgentModelSelector { fs.clone(), cx, move |settings, _cx| { - settings.set_inline_assistant_model( - provider.clone(), - model_id.clone(), - ); + settings + .set_inline_assistant_model(provider.clone(), model_id); }, ); } diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 6b8e36066baf6de905b045a7ea32ee36263058fb..65a9da573ad554d9d70f17aaec8b7c430cd6ec4e 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -1,23 +1,22 @@ -use std::cell::RefCell; use std::ops::{Not, Range}; use std::path::Path; use std::rc::Rc; use std::sync::Arc; use std::time::Duration; -use agent_servers::AgentServer; +use acp_thread::AcpThread; +use agent2::{DbThreadMetadata, HistoryEntry}; use db::kvp::{Dismissable, KEY_VALUE_STORE}; use serde::{Deserialize, Serialize}; -use crate::NewExternalAgentThread; +use crate::acp::{AcpThreadHistory, ThreadHistoryEvent}; use crate::agent_diff::AgentDiffThread; -use crate::message_editor::{MAX_EDITOR_LINES, MIN_EDITOR_LINES}; -use crate::ui::NewThreadButton; use crate::{ AddContextServer, AgentDiffPane, ContinueThread, ContinueWithBurnMode, DeleteRecentlyOpenThread, ExpandMessageEditor, Follow, InlineAssistant, NewTextThread, NewThread, OpenActiveThreadAsMarkdown, OpenAgentDiff, OpenHistory, ResetTrialEndUpsell, - ResetTrialUpsell, ToggleBurnMode, ToggleContextPicker, ToggleNavigationMenu, ToggleOptionsMenu, + ResetTrialUpsell, ToggleBurnMode, ToggleContextPicker, ToggleNavigationMenu, + ToggleNewThreadMenu, ToggleOptionsMenu, acp::AcpThreadView, active_thread::{self, ActiveThread, ActiveThreadEvent}, agent_configuration::{AgentConfiguration, AssistantConfigurationEvent}, @@ -31,6 +30,7 @@ use crate::{ thread_history::{HistoryEntryElement, ThreadHistory}, ui::{AgentOnboardingModal, EndTrialUpsell}, }; +use crate::{ExternalAgent, NewExternalAgentThread, NewNativeAgentThreadFromSummary}; use agent::{ Thread, ThreadError, ThreadEvent, ThreadId, ThreadSummary, TokenUsageRatio, context_store::ContextStore, @@ -46,13 +46,12 @@ use assistant_tool::ToolWorkingSet; use client::{UserStore, zed_urls}; use cloud_llm_client::{CompletionIntent, Plan, UsageLimit}; use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer}; -use feature_flags::{self, FeatureFlagAppExt}; +use feature_flags::{self, ClaudeCodeFeatureFlag, FeatureFlagAppExt, GeminiAndNativeFeatureFlag}; use fs::Fs; use gpui::{ Action, Animation, AnimationExt as _, AnyElement, App, AsyncWindowContext, ClipboardItem, - Corner, DismissEvent, Entity, EventEmitter, ExternalPaths, FocusHandle, Focusable, 
Hsla, - KeyContext, Pixels, Subscription, Task, UpdateGlobal, WeakEntity, prelude::*, - pulsating_between, + Corner, DismissEvent, Entity, EventEmitter, ExternalPaths, FocusHandle, Focusable, KeyContext, + Pixels, Subscription, Task, UpdateGlobal, WeakEntity, prelude::*, pulsating_between, }; use language::LanguageRegistry; use language_model::{ @@ -86,6 +85,7 @@ const AGENT_PANEL_KEY: &str = "agent_panel"; #[derive(Serialize, Deserialize)] struct SerializedAgentPanel { width: Option, + selected_agent: Option, } pub fn init(cx: &mut App) { @@ -98,6 +98,16 @@ pub fn init(cx: &mut App) { workspace.focus_panel::(window, cx); } }) + .register_action( + |workspace, action: &NewNativeAgentThreadFromSummary, window, cx| { + if let Some(panel) = workspace.panel::(cx) { + panel.update(cx, |panel, cx| { + panel.new_native_agent_thread_from_summary(action, window, cx) + }); + workspace.focus_panel::(window, cx); + } + }, + ) .register_action(|workspace, _: &OpenHistory, window, cx| { if let Some(panel) = workspace.panel::(cx) { workspace.focus_panel::(window, cx); @@ -120,7 +130,7 @@ pub fn init(cx: &mut App) { if let Some(panel) = workspace.panel::(cx) { workspace.focus_panel::(window, cx); panel.update(cx, |panel, cx| { - panel.new_external_thread(action.agent, window, cx) + panel.external_thread(action.agent, None, None, window, cx) }); } }) @@ -179,6 +189,14 @@ pub fn init(cx: &mut App) { }); } }) + .register_action(|workspace, _: &ToggleNewThreadMenu, window, cx| { + if let Some(panel) = workspace.panel::(cx) { + workspace.focus_panel::(window, cx); + panel.update(cx, |panel, cx| { + panel.toggle_new_thread_menu(&ToggleNewThreadMenu, window, cx); + }); + } + }) .register_action(|workspace, _: &OpenOnboardingModal, window, cx| { AgentOnboardingModal::toggle(workspace, window, cx) }) @@ -223,6 +241,35 @@ enum WhichFontSize { None, } +#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +pub enum AgentType { + #[default] + Zed, + TextThread, + Gemini, + ClaudeCode, + NativeAgent, +} + +impl AgentType { + fn label(self) -> impl Into { + match self { + Self::Zed | Self::TextThread => "Zed Agent", + Self::NativeAgent => "Agent 2", + Self::Gemini => "Gemini CLI", + Self::ClaudeCode => "Claude Code", + } + } + + fn icon(self) -> Option { + match self { + Self::Zed | Self::NativeAgent | Self::TextThread => None, + Self::Gemini => Some(IconName::AiGemini), + Self::ClaudeCode => Some(IconName::AiClaude), + } + } +} + impl ActiveView { pub fn which_font_size_used(&self) -> WhichFontSize { match self { @@ -317,7 +364,7 @@ impl ActiveView { Self::Thread { change_title_editor: editor, thread: active_thread, - message_editor: message_editor, + message_editor, _subscriptions: subscriptions, } } @@ -325,6 +372,7 @@ impl ActiveView { pub fn prompt_editor( context_editor: Entity, history_store: Entity, + acp_history_store: Entity, language_registry: Arc, window: &mut Window, cx: &mut App, @@ -402,6 +450,18 @@ impl ActiveView { ); } }); + + acp_history_store.update(cx, |history_store, cx| { + if let Some(old_path) = old_path { + history_store + .replace_recently_opened_text_thread(old_path, new_path, cx); + } else { + history_store.push_recently_opened_entry( + agent2::HistoryEntryId::TextThread(new_path.clone()), + cx, + ); + } + }); } _ => {} } @@ -430,6 +490,8 @@ pub struct AgentPanel { fs: Arc, language_registry: Arc, thread_store: Entity, + acp_history: Entity, + acp_history_store: Entity, _default_model_subscription: Subscription, context_store: Entity, prompt_store: Option>, @@ -438,8 
+500,6 @@ pub struct AgentPanel { configuration_subscription: Option, local_timezone: UtcOffset, active_view: ActiveView, - acp_message_history: - Rc>>>, previous_view: Option, history_store: Entity, history: Entity, @@ -453,21 +513,27 @@ pub struct AgentPanel { zoomed: bool, pending_serialization: Option>>, onboarding: Entity, + selected_agent: AgentType, } impl AgentPanel { fn serialize(&mut self, cx: &mut Context) { let width = self.width; + let selected_agent = self.selected_agent; self.pending_serialization = Some(cx.background_spawn(async move { KEY_VALUE_STORE .write_kvp( AGENT_PANEL_KEY.into(), - serde_json::to_string(&SerializedAgentPanel { width })?, + serde_json::to_string(&SerializedAgentPanel { + width, + selected_agent: Some(selected_agent), + })?, ) .await?; anyhow::Ok(()) })); } + pub fn load( workspace: WeakEntity, prompt_builder: Arc, @@ -517,6 +583,17 @@ impl AgentPanel { None }; + // Wait for the Gemini/Native feature flag to be available. + let client = workspace.read_with(cx, |workspace, _| workspace.client().clone())?; + if !client.status().borrow().is_signed_out() { + cx.update(|_, cx| { + cx.wait_for_flag_or_timeout::( + Duration::from_secs(2), + ) + })? + .await; + } + let panel = workspace.update_in(cx, |workspace, window, cx| { let panel = cx.new(|cx| { Self::new( @@ -531,6 +608,10 @@ impl AgentPanel { if let Some(serialized_panel) = serialized_panel { panel.update(cx, |panel, cx| { panel.width = serialized_panel.width.map(|w| w.round()); + if let Some(selected_agent) = serialized_panel.selected_agent { + panel.selected_agent = selected_agent; + panel.new_agent_thread(selected_agent, window, cx); + } cx.notify(); }); } @@ -589,6 +670,29 @@ impl AgentPanel { ) }); + let acp_history_store = cx.new(|cx| agent2::HistoryStore::new(context_store.clone(), cx)); + let acp_history = cx.new(|cx| AcpThreadHistory::new(acp_history_store.clone(), window, cx)); + cx.subscribe_in( + &acp_history, + window, + |this, _, event, window, cx| match event { + ThreadHistoryEvent::Open(HistoryEntry::AcpThread(thread)) => { + this.external_thread( + Some(crate::ExternalAgent::NativeAgent), + Some(thread.clone()), + None, + window, + cx, + ); + } + ThreadHistoryEvent::Open(HistoryEntry::TextThread(thread)) => { + this.open_saved_prompt_editor(thread.path.clone(), window, cx) + .detach_and_log_err(cx); + } + }, + ) + .detach(); + cx.observe(&history_store, |_, _, cx| cx.notify()).detach(); let active_thread = cx.new(|cx| { @@ -627,6 +731,7 @@ impl AgentPanel { ActiveView::prompt_editor( context_editor, history_store.clone(), + acp_history_store.clone(), language_registry.clone(), window, cx, @@ -643,7 +748,11 @@ impl AgentPanel { let assistant_navigation_menu = ContextMenu::build_persistent(window, cx, move |mut menu, _window, cx| { if let Some(panel) = panel.upgrade() { - menu = Self::populate_recently_opened_menu_section(menu, panel, cx); + if cx.has_flag::() { + menu = Self::populate_recently_opened_menu_section_new(menu, panel, cx); + } else { + menu = Self::populate_recently_opened_menu_section_old(menu, panel, cx); + } } menu.action("View All", Box::new(OpenHistory)) .end_slot_action(DeleteRecentlyOpenThread.boxed_clone()) @@ -669,25 +778,25 @@ impl AgentPanel { .ok(); }); - let _default_model_subscription = cx.subscribe( - &LanguageModelRegistry::global(cx), - |this, _, event: &language_model::Event, cx| match event { - language_model::Event::DefaultModelChanged => match &this.active_view { - ActiveView::Thread { thread, .. 
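// `load` above only blocks on the feature-flag fetch for a bounded time (a two-second
// budget passed to `wait_for_flag_or_timeout`), so panel start-up is never held hostage
// by a slow or offline server. A rough stand-in for that idea using std channels — the
// function and channel here are hypothetical, not gpui's actual API:
use std::sync::mpsc;
use std::thread;
use std::time::Duration;

/// Returns the flag value if it arrives within `timeout`, otherwise a conservative
/// default so the caller can proceed.
fn wait_for_flag_or_timeout(rx: &mpsc::Receiver<bool>, timeout: Duration) -> bool {
    rx.recv_timeout(timeout).unwrap_or(false)
}

fn main() {
    let (tx, rx) = mpsc::channel();
    // Simulate the server answering after 50 ms; make this sleep longer than the
    // timeout to see the fallback path instead.
    thread::spawn(move || {
        thread::sleep(Duration::from_millis(50));
        let _ = tx.send(true);
    });
    let enabled = wait_for_flag_or_timeout(&rx, Duration::from_secs(2));
    println!("feature flag resolved to {enabled}");
}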
} => { - thread - .read(cx) - .thread() - .clone() - .update(cx, |thread, cx| thread.get_or_init_configured_model(cx)); + let _default_model_subscription = + cx.subscribe( + &LanguageModelRegistry::global(cx), + |this, _, event: &language_model::Event, cx| { + if let language_model::Event::DefaultModelChanged = event { + match &this.active_view { + ActiveView::Thread { thread, .. } => { + thread.read(cx).thread().clone().update(cx, |thread, cx| { + thread.get_or_init_configured_model(cx) + }); + } + ActiveView::ExternalAgentThread { .. } + | ActiveView::TextThread { .. } + | ActiveView::History + | ActiveView::Configuration => {} + } } - ActiveView::ExternalAgentThread { .. } - | ActiveView::TextThread { .. } - | ActiveView::History - | ActiveView::Configuration => {} }, - _ => {} - }, - ); + ); let onboarding = cx.new(|cx| { AgentPanelOnboarding::new( @@ -719,7 +828,6 @@ impl AgentPanel { .unwrap(), inline_assist_context_store, previous_view: None, - acp_message_history: Default::default(), history_store: history_store.clone(), history: cx.new(|cx| ThreadHistory::new(weak_self, history_store, window, cx)), hovered_recent_history_item: None, @@ -732,6 +840,9 @@ impl AgentPanel { zoomed: false, pending_serialization: None, onboarding, + acp_history, + acp_history_store, + selected_agent: AgentType::default(), } } @@ -775,10 +886,10 @@ impl AgentPanel { ActiveView::Thread { thread, .. } => { thread.update(cx, |thread, cx| thread.cancel_last_completion(window, cx)); } - ActiveView::ExternalAgentThread { thread_view, .. } => { - thread_view.update(cx, |thread_element, cx| thread_element.cancel(cx)); - } - ActiveView::TextThread { .. } | ActiveView::History | ActiveView::Configuration => {} + ActiveView::ExternalAgentThread { .. } + | ActiveView::TextThread { .. } + | ActiveView::History + | ActiveView::Configuration => {} } } @@ -792,7 +903,20 @@ impl AgentPanel { } } + fn active_thread_view(&self) -> Option<&Entity> { + match &self.active_view { + ActiveView::ExternalAgentThread { thread_view } => Some(thread_view), + ActiveView::Thread { .. } + | ActiveView::TextThread { .. 
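// The rewritten `_default_model_subscription` above filters the one interesting event
// with `if let`, but keeps the match over the active view exhaustive (every variant
// named, no `_ => {}`), so a future view variant becomes a compile error here instead
// of being silently ignored. A compact illustration with stand-in enums:
enum Event {
    DefaultModelChanged,
    ProviderAdded,
}

enum View {
    Thread,
    ExternalAgentThread,
    TextThread,
    History,
    Configuration,
}

fn on_event(event: &Event, active_view: &View) {
    // Filter the single event this handler cares about...
    if let Event::DefaultModelChanged = event {
        // ...then handle every view explicitly.
        match active_view {
            View::Thread => println!("refresh the configured model"),
            View::ExternalAgentThread
            | View::TextThread
            | View::History
            | View::Configuration => {}
        }
    }
}

fn main() {
    for view in [
        View::Thread,
        View::ExternalAgentThread,
        View::TextThread,
        View::History,
        View::Configuration,
    ] {
        on_event(&Event::DefaultModelChanged, &view);
    }
    on_event(&Event::ProviderAdded, &View::Thread);
}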
} + | ActiveView::History + | ActiveView::Configuration => None, + } + } + fn new_thread(&mut self, action: &NewThread, window: &mut Window, cx: &mut Context) { + if cx.has_flag::() { + return self.new_agent_thread(AgentType::NativeAgent, window, cx); + } // Preserve chat box text when using creating new thread let preserved_text = self .active_message_editor() @@ -865,12 +989,35 @@ impl AgentPanel { message_editor.focus_handle(cx).focus(window); - let thread_view = ActiveView::thread(active_thread.clone(), message_editor, window, cx); + let thread_view = ActiveView::thread(active_thread, message_editor, window, cx); self.set_active_view(thread_view, window, cx); AgentDiff::set_active_thread(&self.workspace, thread.clone(), window, cx); } + fn new_native_agent_thread_from_summary( + &mut self, + action: &NewNativeAgentThreadFromSummary, + window: &mut Window, + cx: &mut Context, + ) { + let Some(thread) = self + .acp_history_store + .read(cx) + .thread_from_session_id(&action.from_session_id) + else { + return; + }; + + self.external_thread( + Some(ExternalAgent::NativeAgent), + None, + Some(thread.clone()), + window, + cx, + ); + } + fn new_prompt_editor(&mut self, window: &mut Window, cx: &mut Context) { let context = self .context_store @@ -897,6 +1044,7 @@ impl AgentPanel { ActiveView::prompt_editor( context_editor.clone(), self.history_store.clone(), + self.acp_history_store.clone(), self.language_registry.clone(), window, cx, @@ -907,15 +1055,17 @@ impl AgentPanel { context_editor.focus_handle(cx).focus(window); } - fn new_external_thread( + fn external_thread( &mut self, agent_choice: Option, + resume_thread: Option, + summarize_thread: Option, window: &mut Window, cx: &mut Context, ) { let workspace = self.workspace.clone(); let project = self.project.clone(); - let message_history = self.acp_message_history.clone(); + let fs = self.fs.clone(); const LAST_USED_EXTERNAL_AGENT_KEY: &str = "agent_panel__last_used_external_agent"; @@ -924,8 +1074,10 @@ impl AgentPanel { agent: crate::ExternalAgent, } + let history = self.acp_history_store.clone(); + cx.spawn_in(window, async move |this, cx| { - let server: Rc = match agent_choice { + let ext_agent = match agent_choice { Some(agent) => { cx.background_spawn(async move { if let Some(serialized) = @@ -939,10 +1091,10 @@ impl AgentPanel { }) .detach(); - agent.server() + agent } - None => cx - .background_spawn(async move { + None => { + cx.background_spawn(async move { KEY_VALUE_STORE.read_kvp(LAST_USED_EXTERNAL_AGENT_KEY) }) .await @@ -953,18 +1105,34 @@ impl AgentPanel { }) .unwrap_or_default() .agent - .server(), + } }; + let server = ext_agent.server(fs, history); + this.update_in(cx, |this, window, cx| { + match ext_agent { + crate::ExternalAgent::Gemini | crate::ExternalAgent::NativeAgent => { + if !cx.has_flag::() { + return; + } + } + crate::ExternalAgent::ClaudeCode => { + if !cx.has_flag::() { + return; + } + } + } + let thread_view = cx.new(|cx| { crate::acp::AcpThreadView::new( server, + resume_thread, + summarize_thread, workspace.clone(), project, - message_history, - MIN_EDITOR_LINES, - Some(MAX_EDITOR_LINES), + this.acp_history_store.clone(), + this.prompt_store.clone(), window, cx, ) @@ -1053,8 +1221,9 @@ impl AgentPanel { }); self.set_active_view( ActiveView::prompt_editor( - editor.clone(), + editor, self.history_store.clone(), + self.acp_history_store.clone(), self.language_registry.clone(), window, cx, @@ -1125,7 +1294,7 @@ impl AgentPanel { }); message_editor.focus_handle(cx).focus(window); - let thread_view = 
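// `external_thread` above remembers the agent the user last launched by serializing a
// small record into the key-value store, and quietly falls back to the default agent
// when the key is missing or unreadable. A sketch of that persistence shape, with a
// HashMap standing in for Zed's KEY_VALUE_STORE (names and the default variant are
// illustrative assumptions):
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

#[derive(Debug, Default, Clone, Copy, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
enum Agent {
    #[default]
    Gemini,
    ClaudeCode,
    NativeAgent,
}

#[derive(Serialize, Deserialize)]
struct LastUsedExternalAgent {
    agent: Agent,
}

const KEY: &str = "agent_panel__last_used_external_agent";

fn remember(store: &mut HashMap<String, String>, agent: Agent) {
    if let Ok(json) = serde_json::to_string(&LastUsedExternalAgent { agent }) {
        store.insert(KEY.to_string(), json);
    }
}

fn last_used(store: &HashMap<String, String>) -> Agent {
    store
        .get(KEY)
        .and_then(|json| serde_json::from_str::<LastUsedExternalAgent>(json).ok())
        .map(|entry| entry.agent)
        // Missing or corrupt entry: fall back to the default agent.
        .unwrap_or_default()
}

fn main() {
    let mut store = HashMap::new();
    assert_eq!(last_used(&store), Agent::Gemini);
    remember(&mut store, Agent::ClaudeCode);
    assert_eq!(last_used(&store), Agent::ClaudeCode);
}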
ActiveView::thread(active_thread.clone(), message_editor, window, cx); + let thread_view = ActiveView::thread(active_thread, message_editor, window, cx); self.set_active_view(thread_view, window, cx); AgentDiff::set_active_thread(&self.workspace, thread.clone(), window, cx); } @@ -1173,6 +1342,15 @@ impl AgentPanel { self.agent_panel_menu_handle.toggle(window, cx); } + pub fn toggle_new_thread_menu( + &mut self, + _: &ToggleNewThreadMenu, + window: &mut Window, + cx: &mut Context, + ) { + self.new_thread_menu_handle.toggle(window, cx); + } + pub fn increase_font_size( &mut self, action: &IncreaseBufferFontSize, @@ -1203,13 +1381,11 @@ impl AgentPanel { ThemeSettings::get_global(cx).agent_font_size(cx) + delta; let _ = settings .agent_font_size - .insert(theme::clamp_font_size(agent_font_size).0); + .insert(Some(theme::clamp_font_size(agent_font_size).into())); }, ); } else { - theme::adjust_agent_font_size(cx, |size| { - *size += delta; - }); + theme::adjust_agent_font_size(cx, |size| size + delta); } } WhichFontSize::BufferFont => { @@ -1345,15 +1521,14 @@ impl AgentPanel { AssistantConfigurationEvent::NewThread(provider) => { if LanguageModelRegistry::read_global(cx) .default_model() - .map_or(true, |model| model.provider.id() != provider.id()) + .is_none_or(|model| model.provider.id() != provider.id()) + && let Some(model) = provider.default_model(cx) { - if let Some(model) = provider.default_model(cx) { - update_settings_file::( - self.fs.clone(), - cx, - move |settings, _| settings.set_model(model), - ); - } + update_settings_file::( + self.fs.clone(), + cx, + move |settings, _| settings.set_model(model), + ); } self.new_thread(&NewThread::default(), window, cx); @@ -1380,6 +1555,14 @@ impl AgentPanel { _ => None, } } + pub(crate) fn active_agent_thread(&self, cx: &App) -> Option> { + match &self.active_view { + ActiveView::ExternalAgentThread { thread_view, .. } => { + thread_view.read(cx).thread().cloned() + } + _ => None, + } + } pub(crate) fn delete_thread( &mut self, @@ -1400,7 +1583,7 @@ impl AgentPanel { return; } - let model = thread_state.configured_model().map(|cm| cm.model.clone()); + let model = thread_state.configured_model().map(|cm| cm.model); if let Some(model) = model { thread.update(cx, |active_thread, cx| { active_thread.thread().update(cx, |thread, cx| { @@ -1472,17 +1655,14 @@ impl AgentPanel { let current_is_special = current_is_history || current_is_config; let new_is_special = new_is_history || new_is_config; - match &self.active_view { - ActiveView::Thread { thread, .. } => { - let thread = thread.read(cx); - if thread.is_empty() { - let id = thread.thread().read(cx).id().clone(); - self.history_store.update(cx, |store, cx| { - store.remove_recently_opened_thread(id, cx); - }); - } + if let ActiveView::Thread { thread, .. } = &self.active_view { + let thread = thread.read(cx); + if thread.is_empty() { + let id = thread.thread().read(cx).id().clone(); + self.history_store.update(cx, |store, cx| { + store.remove_recently_opened_thread(id, cx); + }); } - _ => {} } match &new_view { @@ -1495,6 +1675,14 @@ impl AgentPanel { if let Some(path) = context_editor.read(cx).context().read(cx).path() { store.push_recently_opened_entry(HistoryEntryId::Context(path.clone()), cx) } + }); + self.acp_history_store.update(cx, |store, cx| { + if let Some(path) = context_editor.read(cx).context().read(cx).path() { + store.push_recently_opened_entry( + agent2::HistoryEntryId::TextThread(path.clone()), + cx, + ) + } }) } ActiveView::ExternalAgentThread { .. 
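// Several hunks in this file replace `Option::map_or(true, …)` with `is_none_or(…)`
// and `map_or(false, …)` with `is_some_and(…)`; the newer adapters read as the
// predicate they express. A quick equivalence check (`is_none_or` is stable since
// Rust 1.82, `is_some_and` since 1.70):
fn main() {
    let configured: Option<&str> = Some("anthropic");
    let missing: Option<&str> = None;

    // "No default model, or one from a different provider."
    assert_eq!(
        configured.is_none_or(|p| p != "zed.dev"),
        configured.map_or(true, |p| p != "zed.dev"),
    );
    assert_eq!(
        missing.is_none_or(|p| p != "zed.dev"),
        missing.map_or(true, |p| p != "zed.dev"),
    );

    // "There is a model and it is not the Zed cloud provider."
    assert_eq!(
        configured.is_some_and(|p| p != "zed.dev"),
        configured.map_or(false, |p| p != "zed.dev"),
    );
    assert!(!missing.is_some_and(|p| p != "zed.dev"));
}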
} => {} @@ -1512,12 +1700,10 @@ impl AgentPanel { self.active_view = new_view; } - self.acp_message_history.borrow_mut().reset_position(); - self.focus_handle(cx).focus(window); } - fn populate_recently_opened_menu_section( + fn populate_recently_opened_menu_section_old( mut menu: ContextMenu, panel: Entity, cx: &mut Context, @@ -1552,7 +1738,7 @@ impl AgentPanel { .open_thread_by_id(&id, window, cx) .detach_and_log_err(cx), HistoryEntryId::Context(path) => this - .open_saved_prompt_editor(path.clone(), window, cx) + .open_saved_prompt_editor(path, window, cx) .detach_and_log_err(cx), }) .ok(); @@ -1580,6 +1766,144 @@ impl AgentPanel { menu } + + fn populate_recently_opened_menu_section_new( + mut menu: ContextMenu, + panel: Entity, + cx: &mut Context, + ) -> ContextMenu { + let entries = panel + .read(cx) + .acp_history_store + .read(cx) + .recently_opened_entries(cx); + + if entries.is_empty() { + return menu; + } + + menu = menu.header("Recently Opened"); + + for entry in entries { + let title = entry.title().clone(); + + menu = menu.entry_with_end_slot_on_hover( + title, + None, + { + let panel = panel.downgrade(); + let entry = entry.clone(); + move |window, cx| { + let entry = entry.clone(); + panel + .update(cx, move |this, cx| match &entry { + agent2::HistoryEntry::AcpThread(entry) => this.external_thread( + Some(ExternalAgent::NativeAgent), + Some(entry.clone()), + None, + window, + cx, + ), + agent2::HistoryEntry::TextThread(entry) => this + .open_saved_prompt_editor(entry.path.clone(), window, cx) + .detach_and_log_err(cx), + }) + .ok(); + } + }, + IconName::Close, + "Close Entry".into(), + { + let panel = panel.downgrade(); + let id = entry.id(); + move |_window, cx| { + panel + .update(cx, |this, cx| { + this.acp_history_store.update(cx, |history_store, cx| { + history_store.remove_recently_opened_entry(&id, cx); + }); + }) + .ok(); + } + }, + ); + } + + menu = menu.separator(); + + menu + } + + pub fn set_selected_agent( + &mut self, + agent: AgentType, + window: &mut Window, + cx: &mut Context, + ) { + if self.selected_agent != agent { + self.selected_agent = agent; + self.serialize(cx); + } + self.new_agent_thread(agent, window, cx); + } + + pub fn selected_agent(&self) -> AgentType { + self.selected_agent + } + + pub fn new_agent_thread( + &mut self, + agent: AgentType, + window: &mut Window, + cx: &mut Context, + ) { + match agent { + AgentType::Zed => { + window.dispatch_action( + NewThread { + from_thread_id: None, + } + .boxed_clone(), + cx, + ); + } + AgentType::TextThread => { + window.dispatch_action(NewTextThread.boxed_clone(), cx); + } + AgentType::NativeAgent => self.external_thread( + Some(crate::ExternalAgent::NativeAgent), + None, + None, + window, + cx, + ), + AgentType::Gemini => { + self.external_thread(Some(crate::ExternalAgent::Gemini), None, None, window, cx) + } + AgentType::ClaudeCode => self.external_thread( + Some(crate::ExternalAgent::ClaudeCode), + None, + None, + window, + cx, + ), + } + } + + pub fn load_agent_thread( + &mut self, + thread: DbThreadMetadata, + window: &mut Window, + cx: &mut Context, + ) { + self.external_thread( + Some(ExternalAgent::NativeAgent), + Some(thread), + None, + window, + cx, + ); + } } impl Focusable for AgentPanel { @@ -1587,7 +1911,13 @@ impl Focusable for AgentPanel { match &self.active_view { ActiveView::Thread { message_editor, .. } => message_editor.focus_handle(cx), ActiveView::ExternalAgentThread { thread_view, .. 
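// The `acp_history_store` calls above (push on open, remove from the menu's end slot,
// replace when a text thread is saved under a new path) imply the usual
// "recently opened" bookkeeping: most recent first, no duplicates, a bounded length.
// A small stand-alone model of those rules — the cap and entry types are illustrative,
// not Zed's exact semantics:
const MAX_RECENT: usize = 6; // illustrative cap

#[derive(Debug, Clone, PartialEq, Eq)]
enum EntryId {
    AcpThread(String),
    TextThread(String),
}

#[derive(Default)]
struct RecentlyOpened {
    entries: Vec<EntryId>, // most recent first
}

impl RecentlyOpened {
    fn push(&mut self, id: EntryId) {
        self.entries.retain(|e| e != &id); // dedupe: re-opening moves to front
        self.entries.insert(0, id);
        self.entries.truncate(MAX_RECENT); // keep the menu bounded
    }

    fn remove(&mut self, id: &EntryId) {
        self.entries.retain(|e| e != id);
    }

    fn replace_text_thread_path(&mut self, old: &str, new: &str) {
        for entry in &mut self.entries {
            if let EntryId::TextThread(path) = entry {
                if path == old {
                    *path = new.to_string();
                }
            }
        }
    }
}

fn main() {
    let mut recent = RecentlyOpened::default();
    recent.push(EntryId::TextThread("a.md".into()));
    recent.push(EntryId::AcpThread("thread-1".into()));
    recent.push(EntryId::TextThread("a.md".into())); // re-open: moves to front
    assert_eq!(recent.entries.len(), 2);
    recent.replace_text_thread_path("a.md", "b.md");
    recent.remove(&EntryId::AcpThread("thread-1".into()));
    assert_eq!(recent.entries, vec![EntryId::TextThread("b.md".into())]);
}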
} => thread_view.focus_handle(cx), - ActiveView::History => self.history.focus_handle(cx), + ActiveView::History => { + if cx.has_flag::() { + self.acp_history.focus_handle(cx) + } else { + self.history.focus_handle(cx) + } + } ActiveView::TextThread { context_editor, .. } => context_editor.focus_handle(cx), ActiveView::Configuration => { if let Some(configuration) = self.configuration.as_ref() { @@ -1709,7 +2039,7 @@ impl AgentPanel { }; match state { - ThreadSummary::Pending => Label::new(ThreadSummary::DEFAULT.clone()) + ThreadSummary::Pending => Label::new(ThreadSummary::DEFAULT) .truncate() .into_any_element(), ThreadSummary::Generating => Label::new(LOADING_SUMMARY_PLACEHOLDER) @@ -1723,7 +2053,8 @@ impl AgentPanel { .w_full() .child(change_title_editor.clone()) .child( - ui::IconButton::new("retry-summary-generation", IconName::RotateCcw) + IconButton::new("retry-summary-generation", IconName::RotateCcw) + .icon_size(IconSize::Small) .on_click({ let active_thread = active_thread.clone(); move |_, _window, cx| { @@ -1775,7 +2106,8 @@ impl AgentPanel { .w_full() .child(title_editor.clone()) .child( - ui::IconButton::new("retry-summary-generation", IconName::RotateCcw) + IconButton::new("retry-summary-generation", IconName::RotateCcw) + .icon_size(IconSize::Small) .on_click({ let context_editor = context_editor.clone(); move |_, _window, cx| { @@ -1810,75 +2142,165 @@ impl AgentPanel { .into_any() } - fn render_toolbar(&self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + fn render_panel_options_menu( + &self, + window: &mut Window, + cx: &mut Context, + ) -> impl IntoElement { let user_store = self.user_store.read(cx); let usage = user_store.model_request_usage(); - let account_url = zed_urls::account_url(cx); let focus_handle = self.focus_handle(cx); - let go_back_button = div().child( - IconButton::new("go-back", IconName::ArrowLeft) - .icon_size(IconSize::Small) - .on_click(cx.listener(|this, _, window, cx| { - this.go_back(&workspace::GoBack, window, cx); - })) - .tooltip({ + let full_screen_label = if self.is_zoomed(window, cx) { + "Disable Full Screen" + } else { + "Enable Full Screen" + }; + + PopoverMenu::new("agent-options-menu") + .trigger_with_tooltip( + IconButton::new("agent-options-menu", IconName::Ellipsis) + .icon_size(IconSize::Small), + { let focus_handle = focus_handle.clone(); move |window, cx| { Tooltip::for_action_in( - "Go Back", - &workspace::GoBack, + "Toggle Agent Menu", + &ToggleOptionsMenu, &focus_handle, window, cx, ) } - }), - ); - - let recent_entries_menu = div().child( - PopoverMenu::new("agent-nav-menu") - .trigger_with_tooltip( - IconButton::new("agent-nav-menu", IconName::MenuAlt) - .icon_size(IconSize::Small) - .style(ui::ButtonStyle::Subtle), - { - let focus_handle = focus_handle.clone(); - move |window, cx| { - Tooltip::for_action_in( - "Toggle Panel Menu", - &ToggleNavigationMenu, - &focus_handle, - window, - cx, - ) - } - }, - ) - .anchor(Corner::TopLeft) - .with_handle(self.assistant_navigation_menu_handle.clone()) - .menu({ - let menu = self.assistant_navigation_menu.clone(); - move |window, cx| { - if let Some(menu) = menu.as_ref() { - menu.update(cx, |_, cx| { - cx.defer_in(window, |menu, window, cx| { - menu.rebuild(window, cx); - }); - }) + }, + ) + .anchor(Corner::TopRight) + .with_handle(self.agent_panel_menu_handle.clone()) + .menu({ + move |window, cx| { + Some(ContextMenu::build(window, cx, |mut menu, _window, _| { + menu = menu.context(focus_handle.clone()); + if let Some(usage) = usage { + menu = menu + 
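// The "Prompt Usage" menu entry above turns a usage figure into an optional progress
// percentage plus a label, rendering unlimited plans as "∞" with no bar. The same
// arithmetic as a pure function, with stand-in types:
enum UsageLimit {
    Limited(u32),
    Unlimited,
}

struct Usage {
    amount: u32,
    limit: UsageLimit,
}

fn usage_display(usage: &Usage) -> (Option<f32>, String) {
    match usage.limit {
        UsageLimit::Limited(limit) => (
            Some(usage.amount as f32 / limit as f32 * 100.0),
            format!("{} / {limit}", usage.amount),
        ),
        UsageLimit::Unlimited => (None, format!("{} / ∞", usage.amount)),
    }
}

fn main() {
    let (percent, label) = usage_display(&Usage {
        amount: 30,
        limit: UsageLimit::Limited(50),
    });
    assert_eq!(percent, Some(60.0));
    assert_eq!(label, "30 / 50");

    let (percent, label) = usage_display(&Usage {
        amount: 12,
        limit: UsageLimit::Unlimited,
    });
    assert!(percent.is_none());
    assert_eq!(label, "12 / ∞");
}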
.header_with_link("Prompt Usage", "Manage", account_url.clone()) + .custom_entry( + move |_window, cx| { + let used_percentage = match usage.limit { + UsageLimit::Limited(limit) => { + Some((usage.amount as f32 / limit as f32) * 100.) + } + UsageLimit::Unlimited => None, + }; + + h_flex() + .flex_1() + .gap_1p5() + .children(used_percentage.map(|percent| { + ProgressBar::new("usage", percent, 100., cx) + })) + .child( + Label::new(match usage.limit { + UsageLimit::Limited(limit) => { + format!("{} / {limit}", usage.amount) + } + UsageLimit::Unlimited => { + format!("{} / ∞", usage.amount) + } + }) + .size(LabelSize::Small) + .color(Color::Muted), + ) + .into_any_element() + }, + move |_, cx| cx.open_url(&zed_urls::account_url(cx)), + ) + .separator() } - menu.clone() + + menu = menu + .header("MCP Servers") + .action( + "View Server Extensions", + Box::new(zed_actions::Extensions { + category_filter: Some( + zed_actions::ExtensionCategoryFilter::ContextServers, + ), + id: None, + }), + ) + .action("Add Custom Server…", Box::new(AddContextServer)) + .separator(); + + menu = menu + .action("Rules…", Box::new(OpenRulesLibrary::default())) + .action("Settings", Box::new(OpenSettings)) + .separator() + .action(full_screen_label, Box::new(ToggleZoom)); + menu + })) + } + }) + } + + fn render_recent_entries_menu( + &self, + icon: IconName, + corner: Corner, + cx: &mut Context, + ) -> impl IntoElement { + let focus_handle = self.focus_handle(cx); + + PopoverMenu::new("agent-nav-menu") + .trigger_with_tooltip( + IconButton::new("agent-nav-menu", icon).icon_size(IconSize::Small), + { + move |window, cx| { + Tooltip::for_action_in( + "Toggle Recent Threads", + &ToggleNavigationMenu, + &focus_handle, + window, + cx, + ) } - }), - ); + }, + ) + .anchor(corner) + .with_handle(self.assistant_navigation_menu_handle.clone()) + .menu({ + let menu = self.assistant_navigation_menu.clone(); + move |window, cx| { + if let Some(menu) = menu.as_ref() { + menu.update(cx, |_, cx| { + cx.defer_in(window, |menu, window, cx| { + menu.rebuild(window, cx); + }); + }) + } + menu.clone() + } + }) + } - let full_screen_label = if self.is_zoomed(window, cx) { - "Disable Full Screen" - } else { - "Enable Full Screen" - }; + fn render_toolbar_back_button(&self, cx: &mut Context) -> impl IntoElement { + let focus_handle = self.focus_handle(cx); + + IconButton::new("go-back", IconName::ArrowLeft) + .icon_size(IconSize::Small) + .on_click(cx.listener(|this, _, window, cx| { + this.go_back(&workspace::GoBack, window, cx); + })) + .tooltip({ + move |window, cx| { + Tooltip::for_action_in("Go Back", &workspace::GoBack, &focus_handle, window, cx) + } + }) + } + + fn render_toolbar_old(&self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let focus_handle = self.focus_handle(cx); let active_thread = match &self.active_view { ActiveView::Thread { thread, .. 
} => Some(thread.read(cx).thread().clone()), @@ -1896,15 +2318,11 @@ impl AgentPanel { .anchor(Corner::TopRight) .with_handle(self.new_thread_menu_handle.clone()) .menu({ - let focus_handle = focus_handle.clone(); move |window, cx| { let active_thread = active_thread.clone(); Some(ContextMenu::build(window, cx, |mut menu, _window, cx| { menu = menu .context(focus_handle.clone()) - .when(cx.has_flag::(), |this| { - this.header("Zed Agent") - }) .when_some(active_thread, |this, active_thread| { let thread = active_thread.read(cx); @@ -1947,72 +2365,78 @@ impl AgentPanel { .handler(move |window, cx| { window.dispatch_action(NewTextThread.boxed_clone(), cx); }), - ) - .when(cx.has_flag::(), |this| { - this.separator() - .header("External Agents") - .item( - ContextMenuEntry::new("New Gemini Thread") - .icon(IconName::AiGemini) - .icon_color(Color::Muted) - .handler(move |window, cx| { - window.dispatch_action( - NewExternalAgentThread { - agent: Some(crate::ExternalAgent::Gemini), - } - .boxed_clone(), - cx, - ); - }), - ) - .item( - ContextMenuEntry::new("New Claude Code Thread") - .icon(IconName::AiClaude) - .icon_color(Color::Muted) - .handler(move |window, cx| { - window.dispatch_action( - NewExternalAgentThread { - agent: Some( - crate::ExternalAgent::ClaudeCode, - ), - } - .boxed_clone(), - cx, - ); - }), - ) - .item( - ContextMenuEntry::new("New Native Agent Thread") - .icon(IconName::ZedAssistant) - .icon_color(Color::Muted) - .handler(move |window, cx| { - window.dispatch_action( - NewExternalAgentThread { - agent: Some( - crate::ExternalAgent::NativeAgent, - ), - } - .boxed_clone(), - cx, - ); - }), - ) - }); + ); menu })) } }); - let agent_panel_menu = PopoverMenu::new("agent-options-menu") + h_flex() + .id("assistant-toolbar") + .h(Tab::container_height(cx)) + .max_w_full() + .flex_none() + .justify_between() + .gap_2() + .bg(cx.theme().colors().tab_bar_background) + .border_b_1() + .border_color(cx.theme().colors().border) + .child( + h_flex() + .size_full() + .pl_1() + .gap_1() + .child(match &self.active_view { + ActiveView::History | ActiveView::Configuration => div() + .pl(DynamicSpacing::Base04.rems(cx)) + .child(self.render_toolbar_back_button(cx)) + .into_any_element(), + _ => self + .render_recent_entries_menu(IconName::MenuAlt, Corner::TopLeft, cx) + .into_any_element(), + }) + .child(self.render_title_view(window, cx)), + ) + .child( + h_flex() + .h_full() + .gap_2() + .children(self.render_token_count(cx)) + .child( + h_flex() + .h_full() + .gap(DynamicSpacing::Base02.rems(cx)) + .px(DynamicSpacing::Base08.rems(cx)) + .border_l_1() + .border_color(cx.theme().colors().border) + .child(new_thread_menu) + .child(self.render_panel_options_menu(window, cx)), + ), + ) + } + + fn render_toolbar_new(&self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let focus_handle = self.focus_handle(cx); + + let active_thread = match &self.active_view { + ActiveView::ExternalAgentThread { thread_view } => { + thread_view.read(cx).as_native_thread(cx) + } + ActiveView::Thread { .. } + | ActiveView::TextThread { .. 
} + | ActiveView::History + | ActiveView::Configuration => None, + }; + + let new_thread_menu = PopoverMenu::new("new_thread_menu") .trigger_with_tooltip( - IconButton::new("agent-options-menu", IconName::Ellipsis) - .icon_size(IconSize::Small), + IconButton::new("new_thread_menu_btn", IconName::Plus).icon_size(IconSize::Small), { let focus_handle = focus_handle.clone(); move |window, cx| { Tooltip::for_action_in( - "Toggle Agent Menu", - &ToggleOptionsMenu, + "New…", + &ToggleNewThreadMenu, &focus_handle, window, cx, @@ -2020,76 +2444,172 @@ impl AgentPanel { } }, ) - .anchor(Corner::TopRight) - .with_handle(self.agent_panel_menu_handle.clone()) + .anchor(Corner::TopLeft) + .with_handle(self.new_thread_menu_handle.clone()) .menu({ - let focus_handle = focus_handle.clone(); - move |window, cx| { - Some(ContextMenu::build(window, cx, |mut menu, _window, _| { - menu = menu.context(focus_handle.clone()); - if let Some(usage) = usage { - menu = menu - .header_with_link("Prompt Usage", "Manage", account_url.clone()) - .custom_entry( - move |_window, cx| { - let used_percentage = match usage.limit { - UsageLimit::Limited(limit) => { - Some((usage.amount as f32 / limit as f32) * 100.) - } - UsageLimit::Unlimited => None, - }; - - h_flex() - .flex_1() - .gap_1p5() - .children(used_percentage.map(|percent| { - ProgressBar::new("usage", percent, 100., cx) - })) - .child( - Label::new(match usage.limit { - UsageLimit::Limited(limit) => { - format!("{} / {limit}", usage.amount) - } - UsageLimit::Unlimited => { - format!("{} / ∞", usage.amount) - } - }) - .size(LabelSize::Small) - .color(Color::Muted), - ) - .into_any_element() - }, - move |_, cx| cx.open_url(&zed_urls::account_url(cx)), - ) - .separator() - } + let workspace = self.workspace.clone(); + move |window, cx| { + let active_thread = active_thread.clone(); + Some(ContextMenu::build(window, cx, |mut menu, _window, cx| { menu = menu - .header("MCP Servers") - .action( - "View Server Extensions", - Box::new(zed_actions::Extensions { - category_filter: Some( - zed_actions::ExtensionCategoryFilter::ContextServers, - ), - id: None, - }), + .context(focus_handle.clone()) + .header("Zed Agent") + .when_some(active_thread, |this, active_thread| { + let thread = active_thread.read(cx); + + if !thread.is_empty() { + let session_id = thread.id().clone(); + this.item( + ContextMenuEntry::new("New From Summary") + .icon(IconName::ThreadFromSummary) + .icon_color(Color::Muted) + .handler(move |window, cx| { + window.dispatch_action( + Box::new(NewNativeAgentThreadFromSummary { + from_session_id: session_id.clone(), + }), + cx, + ); + }), + ) + } else { + this + } + }) + .item( + ContextMenuEntry::new("New Thread") + .action(NewThread::default().boxed_clone()) + .icon(IconName::Thread) + .icon_color(Color::Muted) + .handler({ + let workspace = workspace.clone(); + move |window, cx| { + if let Some(workspace) = workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + if let Some(panel) = + workspace.panel::(cx) + { + panel.update(cx, |panel, cx| { + panel.set_selected_agent( + AgentType::NativeAgent, + window, + cx, + ); + }); + } + }); + } + } + }), + ) + .item( + ContextMenuEntry::new("New Text Thread") + .icon(IconName::TextThread) + .icon_color(Color::Muted) + .action(NewTextThread.boxed_clone()) + .handler({ + let workspace = workspace.clone(); + move |window, cx| { + if let Some(workspace) = workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + if let Some(panel) = + workspace.panel::(cx) + { + panel.update(cx, |panel, cx| { + 
panel.set_selected_agent( + AgentType::TextThread, + window, + cx, + ); + }); + } + }); + } + } + }), ) - .action("Add Custom Server…", Box::new(AddContextServer)) - .separator(); - - menu = menu - .action("Rules…", Box::new(OpenRulesLibrary::default())) - .action("Settings", Box::new(OpenSettings)) .separator() - .action(full_screen_label, Box::new(ToggleZoom)); + .header("External Agents") + .when(cx.has_flag::(), |menu| { + menu.item( + ContextMenuEntry::new("New Gemini CLI Thread") + .icon(IconName::AiGemini) + .icon_color(Color::Muted) + .handler({ + let workspace = workspace.clone(); + move |window, cx| { + if let Some(workspace) = workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + if let Some(panel) = + workspace.panel::(cx) + { + panel.update(cx, |panel, cx| { + panel.set_selected_agent( + AgentType::Gemini, + window, + cx, + ); + }); + } + }); + } + } + }), + ) + }) + .when(cx.has_flag::(), |menu| { + menu.item( + ContextMenuEntry::new("New Claude Code Thread") + .icon(IconName::AiClaude) + .icon_color(Color::Muted) + .handler({ + let workspace = workspace.clone(); + move |window, cx| { + if let Some(workspace) = workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + if let Some(panel) = + workspace.panel::(cx) + { + panel.update(cx, |panel, cx| { + panel.set_selected_agent( + AgentType::ClaudeCode, + window, + cx, + ); + }); + } + }); + } + } + }), + ) + }); menu })) } }); + let selected_agent_label = self.selected_agent.label().into(); + let selected_agent = div() + .id("selected_agent_icon") + .when_some(self.selected_agent.icon(), |this, icon| { + this.px(DynamicSpacing::Base02.rems(cx)) + .child(Icon::new(icon).color(Color::Muted)) + .tooltip(move |window, cx| { + Tooltip::with_meta( + selected_agent_label.clone(), + None, + "Selected Agent", + window, + cx, + ) + }) + }) + .into_any_element(); + h_flex() - .id("assistant-toolbar") + .id("agent-panel-toolbar") .h(Tab::container_height(cx)) .max_w_full() .flex_none() @@ -2101,32 +2621,42 @@ impl AgentPanel { .child( h_flex() .size_full() - .pl_1() - .gap_1() + .gap(DynamicSpacing::Base04.rems(cx)) + .pl(DynamicSpacing::Base04.rems(cx)) .child(match &self.active_view { - ActiveView::History | ActiveView::Configuration => go_back_button, - _ => recent_entries_menu, + ActiveView::History | ActiveView::Configuration => { + self.render_toolbar_back_button(cx).into_any_element() + } + _ => selected_agent.into_any_element(), }) .child(self.render_title_view(window, cx)), ) .child( h_flex() - .h_full() - .gap_2() - .children(self.render_token_count(cx)) - .child( - h_flex() - .h_full() - .gap(DynamicSpacing::Base02.rems(cx)) - .px(DynamicSpacing::Base08.rems(cx)) - .border_l_1() - .border_color(cx.theme().colors().border) - .child(new_thread_menu) - .child(agent_panel_menu), - ), + .flex_none() + .gap(DynamicSpacing::Base02.rems(cx)) + .pl(DynamicSpacing::Base04.rems(cx)) + .pr(DynamicSpacing::Base06.rems(cx)) + .child(new_thread_menu) + .child(self.render_recent_entries_menu( + IconName::MenuAltTemp, + Corner::TopRight, + cx, + )) + .child(self.render_panel_options_menu(window, cx)), ) } + fn render_toolbar(&self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + if cx.has_flag::() + || cx.has_flag::() + { + self.render_toolbar_new(window, cx).into_any_element() + } else { + self.render_toolbar_old(window, cx).into_any_element() + } + } + fn render_token_count(&self, cx: &App) -> Option { match &self.active_view { ActiveView::Thread { @@ -2245,9 +2775,7 @@ impl AgentPanel { } 
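// Every handler in the new-thread menu above captures a *weak* workspace handle and
// upgrades it on click, so a long-lived menu callback never keeps the workspace or
// panel alive by itself. The same ownership shape with plain Rc/Weak (the types here
// are stand-ins for gpui entities):
use std::cell::RefCell;
use std::rc::{Rc, Weak};

struct Panel {
    selected_agent: &'static str,
}

fn make_handler(panel: &Rc<RefCell<Panel>>, agent: &'static str) -> impl Fn() {
    let weak: Weak<RefCell<Panel>> = Rc::downgrade(panel);
    move || {
        // If the panel is already gone, the click is a no-op instead of a leak or a
        // dangling access.
        if let Some(panel) = weak.upgrade() {
            panel.borrow_mut().selected_agent = agent;
        }
    }
}

fn main() {
    let panel = Rc::new(RefCell::new(Panel { selected_agent: "zed" }));
    let select_claude = make_handler(&panel, "claude-code");

    select_claude();
    assert_eq!(panel.borrow().selected_agent, "claude-code");

    drop(panel);
    select_claude(); // safe: upgrade() returns None now
}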
ActiveView::ExternalAgentThread { .. } | ActiveView::History - | ActiveView::Configuration => { - return None; - } + | ActiveView::Configuration => None, } } @@ -2263,7 +2791,7 @@ impl AgentPanel { .thread() .read(cx) .configured_model() - .map_or(false, |model| { + .is_some_and(|model| { model.provider.id() != language_model::ZED_CLOUD_PROVIDER_ID }) { @@ -2274,7 +2802,7 @@ impl AgentPanel { if LanguageModelRegistry::global(cx) .read(cx) .default_model() - .map_or(false, |model| { + .is_some_and(|model| { model.provider.id() != language_model::ZED_CLOUD_PROVIDER_ID }) { @@ -2298,10 +2826,19 @@ impl AgentPanel { } match &self.active_view { - ActiveView::Thread { .. } | ActiveView::TextThread { .. } => { - let history_is_empty = self - .history_store - .update(cx, |store, cx| store.recent_entries(1, cx).is_empty()); + ActiveView::History | ActiveView::Configuration => false, + ActiveView::ExternalAgentThread { thread_view, .. } + if thread_view.read(cx).as_native_thread(cx).is_none() => + { + false + } + _ => { + let history_is_empty = if cx.has_flag::() { + self.acp_history_store.read(cx).is_empty(cx) + } else { + self.history_store + .update(cx, |store, cx| store.recent_entries(1, cx).is_empty()) + }; let has_configured_non_zed_providers = LanguageModelRegistry::read_global(cx) .providers() @@ -2313,9 +2850,6 @@ impl AgentPanel { history_is_empty || !has_configured_non_zed_providers } - ActiveView::ExternalAgentThread { .. } - | ActiveView::History - | ActiveView::Configuration => false, } } @@ -2388,20 +2922,22 @@ impl AgentPanel { action_slot: Option, cx: &mut Context, ) -> impl IntoElement { - h_flex() - .mt_2() - .pl_1p5() - .pb_1() - .w_full() - .justify_between() - .border_b_1() - .border_color(cx.theme().colors().border_variant) - .child( - Label::new(label.into()) - .size(LabelSize::Small) - .color(Color::Muted), - ) - .children(action_slot) + div().pl_1().pr_1p5().child( + h_flex() + .mt_2() + .pl_1p5() + .pb_1() + .w_full() + .justify_between() + .border_b_1() + .border_color(cx.theme().colors().border_variant) + .child( + Label::new(label.into()) + .size(LabelSize::Small) + .color(Color::Muted), + ) + .children(action_slot), + ) } fn render_thread_empty_state( @@ -2507,22 +3043,12 @@ impl AgentPanel { }), ), ) - }) - .when_some(configuration_error.as_ref(), |this, err| { - this.child(self.render_configuration_error( - err, - &focus_handle, - window, - cx, - )) }), ) }) .when(!recent_history.is_empty(), |parent| { - let focus_handle = focus_handle.clone(); parent .overflow_hidden() - .p_1p5() .justify_end() .gap_1() .child( @@ -2550,14 +3076,15 @@ impl AgentPanel { ), ) .child( - v_flex() - .gap_1() - .children(recent_history.into_iter().enumerate().map( - |(index, entry)| { + v_flex().p_1().pr_1p5().gap_1().children( + recent_history + .into_iter() + .enumerate() + .map(|(index, entry)| { // TODO: Add keyboard navigation. 
let is_hovered = self.hovered_recent_history_item == Some(index); - HistoryEntryElement::new(entry.clone(), cx.entity().downgrade()) + HistoryEntryElement::new(entry, cx.entity().downgrade()) .hovered(is_hovered) .on_hover(cx.listener( move |this, is_hovered, _window, cx| { @@ -2572,181 +3099,92 @@ impl AgentPanel { }, )) .into_any_element() - }, - )), - ) - .child(self.render_empty_state_section_header("Start", None, cx)) - .child( - v_flex() - .p_1() - .gap_2() - .child( - h_flex() - .w_full() - .gap_2() - .child( - NewThreadButton::new( - "new-thread-btn", - "New Thread", - IconName::Thread, - ) - .keybinding(KeyBinding::for_action_in( - &NewThread::default(), - &self.focus_handle(cx), - window, - cx, - )) - .on_click( - |window, cx| { - window.dispatch_action( - NewThread::default().boxed_clone(), - cx, - ) - }, - ), - ) - .child( - NewThreadButton::new( - "new-text-thread-btn", - "New Text Thread", - IconName::TextThread, - ) - .keybinding(KeyBinding::for_action_in( - &NewTextThread, - &self.focus_handle(cx), - window, - cx, - )) - .on_click( - |window, cx| { - window.dispatch_action(Box::new(NewTextThread), cx) - }, - ), - ), - ) - .when(cx.has_flag::(), |this| { - this.child( - h_flex() - .w_full() - .gap_2() - .child( - NewThreadButton::new( - "new-gemini-thread-btn", - "New Gemini Thread", - IconName::AiGemini, - ) - // .keybinding(KeyBinding::for_action_in( - // &OpenHistory, - // &self.focus_handle(cx), - // window, - // cx, - // )) - .on_click( - |window, cx| { - window.dispatch_action( - Box::new(NewExternalAgentThread { - agent: Some( - crate::ExternalAgent::Gemini, - ), - }), - cx, - ) - }, - ), - ) - .child( - NewThreadButton::new( - "new-claude-thread-btn", - "New Claude Code Thread", - IconName::AiClaude, - ) - // .keybinding(KeyBinding::for_action_in( - // &OpenHistory, - // &self.focus_handle(cx), - // window, - // cx, - // )) - .on_click( - |window, cx| { - window.dispatch_action( - Box::new(NewExternalAgentThread { - agent: Some( - crate::ExternalAgent::ClaudeCode, - ), - }), - cx, - ) - }, - ), - ) - .child( - NewThreadButton::new( - "new-native-agent-thread-btn", - "New Native Agent Thread", - IconName::ZedAssistant, - ) - // .keybinding(KeyBinding::for_action_in( - // &OpenHistory, - // &self.focus_handle(cx), - // window, - // cx, - // )) - .on_click( - |window, cx| { - window.dispatch_action( - Box::new(NewExternalAgentThread { - agent: Some( - crate::ExternalAgent::NativeAgent, - ), - }), - cx, - ) - }, - ), - ), - ) - }), + }), + ), ) - .when_some(configuration_error.as_ref(), |this, err| { - this.child(self.render_configuration_error(err, &focus_handle, window, cx)) - }) + }) + .when_some(configuration_error.as_ref(), |this, err| { + this.child(self.render_configuration_error(false, err, &focus_handle, window, cx)) }) } fn render_configuration_error( &self, + border_bottom: bool, configuration_error: &ConfigurationError, focus_handle: &FocusHandle, window: &mut Window, cx: &mut App, ) -> impl IntoElement { - match configuration_error { - ConfigurationError::ModelNotFound - | ConfigurationError::ProviderNotAuthenticated(_) - | ConfigurationError::NoProvider => Banner::new() - .severity(ui::Severity::Warning) - .child(Label::new(configuration_error.to_string())) - .action_slot( - Button::new("settings", "Configure Provider") + let zed_provider_configured = AgentSettings::get_global(cx) + .default_model + .as_ref() + .is_some_and(|selection| selection.provider.0.as_str() == "zed.dev"); + + let callout = if zed_provider_configured { + Callout::new() + 
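// `render_configuration_error` above now special-cases the stock "zed.dev" provider:
// when that is the configured default, the missing piece is credentials, so the callout
// offers "Sign In"; anything else keeps the generic "Configure" action. The decision
// itself reduces to a small pure function, sketched with invented types:
#[derive(Debug, PartialEq)]
enum Fix {
    SignIn,
    OpenSettings,
}

fn configuration_fix(default_provider: Option<&str>) -> Fix {
    if default_provider == Some("zed.dev") {
        Fix::SignIn
    } else {
        Fix::OpenSettings
    }
}

fn main() {
    assert_eq!(configuration_fix(Some("zed.dev")), Fix::SignIn);
    assert_eq!(configuration_fix(Some("anthropic")), Fix::OpenSettings);
    assert_eq!(configuration_fix(None), Fix::OpenSettings);
}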
.icon(IconName::Warning) + .severity(Severity::Warning) + .when(border_bottom, |this| { + this.border_position(ui::BorderPosition::Bottom) + }) + .title("Sign in to continue using Zed as your LLM provider.") + .actions_slot( + Button::new("sign_in", "Sign In") + .style(ButtonStyle::Tinted(ui::TintColor::Warning)) + .label_size(LabelSize::Small) + .on_click({ + let workspace = self.workspace.clone(); + move |_, _, cx| { + let Ok(client) = + workspace.update(cx, |workspace, _| workspace.client().clone()) + else { + return; + }; + + cx.spawn(async move |cx| { + client.sign_in_with_optional_connect(true, cx).await + }) + .detach_and_log_err(cx); + } + }), + ) + } else { + Callout::new() + .icon(IconName::Warning) + .severity(Severity::Warning) + .when(border_bottom, |this| { + this.border_position(ui::BorderPosition::Bottom) + }) + .title(configuration_error.to_string()) + .actions_slot( + Button::new("settings", "Configure") .style(ButtonStyle::Tinted(ui::TintColor::Warning)) .label_size(LabelSize::Small) .key_binding( - KeyBinding::for_action_in(&OpenSettings, &focus_handle, window, cx) + KeyBinding::for_action_in(&OpenSettings, focus_handle, window, cx) .map(|kb| kb.size(rems_from_px(12.))), ) .on_click(|_event, window, cx| { window.dispatch_action(OpenSettings.boxed_clone(), cx) }), - ), + ) + }; + + match configuration_error { + ConfigurationError::ModelNotFound + | ConfigurationError::ProviderNotAuthenticated(_) + | ConfigurationError::NoProvider => callout.into_any_element(), ConfigurationError::ProviderPendingTermsAcceptance(provider) => { - Banner::new().severity(ui::Severity::Warning).child( - h_flex().w_full().children( + Banner::new() + .severity(Severity::Warning) + .child(h_flex().w_full().children( provider.render_accept_terms( LanguageModelProviderTosView::ThreadEmptyState, cx, ), - ), - ) + )) + .into_any_element() } } } @@ -2778,7 +3216,7 @@ impl AgentPanel { let focus_handle = self.focus_handle(cx); let banner = Banner::new() - .severity(ui::Severity::Info) + .severity(Severity::Info) .child(Label::new("Consecutive tool use limit reached.").size(LabelSize::Small)) .action_slot( h_flex() @@ -2889,10 +3327,6 @@ impl AgentPanel { })) } - fn error_callout_bg(&self, cx: &Context) -> Hsla { - cx.theme().status().error.opacity(0.08) - } - fn render_payment_required_error( &self, thread: &Entity, @@ -2901,23 +3335,18 @@ impl AgentPanel { const ERROR_MESSAGE: &str = "You reached your free usage limit. 
Upgrade to Zed Pro for more prompts."; - let icon = Icon::new(IconName::XCircle) - .size(IconSize::Small) - .color(Color::Error); - - div() - .border_t_1() - .border_color(cx.theme().colors().border) - .child( - Callout::new() - .icon(icon) - .title("Free Usage Exceeded") - .description(ERROR_MESSAGE) - .tertiary_action(self.upgrade_button(thread, cx)) - .secondary_action(self.create_copy_button(ERROR_MESSAGE)) - .primary_action(self.dismiss_error_button(thread, cx)) - .bg_color(self.error_callout_bg(cx)), + Callout::new() + .severity(Severity::Error) + .icon(IconName::XCircle) + .title("Free Usage Exceeded") + .description(ERROR_MESSAGE) + .actions_slot( + h_flex() + .gap_0p5() + .child(self.upgrade_button(thread, cx)) + .child(self.create_copy_button(ERROR_MESSAGE)), ) + .dismiss_action(self.dismiss_error_button(thread, cx)) .into_any_element() } @@ -2932,40 +3361,22 @@ impl AgentPanel { Plan::ZedProTrial | Plan::ZedFree => "Upgrade to Zed Pro for more prompts.", }; - let icon = Icon::new(IconName::XCircle) - .size(IconSize::Small) - .color(Color::Error); - - div() - .border_t_1() - .border_color(cx.theme().colors().border) - .child( - Callout::new() - .icon(icon) - .title("Model Prompt Limit Reached") - .description(error_message) - .tertiary_action(self.upgrade_button(thread, cx)) - .secondary_action(self.create_copy_button(error_message)) - .primary_action(self.dismiss_error_button(thread, cx)) - .bg_color(self.error_callout_bg(cx)), + Callout::new() + .severity(Severity::Error) + .title("Model Prompt Limit Reached") + .description(error_message) + .actions_slot( + h_flex() + .gap_0p5() + .child(self.upgrade_button(thread, cx)) + .child(self.create_copy_button(error_message)), ) + .dismiss_action(self.dismiss_error_button(thread, cx)) .into_any_element() } - fn render_error_message( - &self, - header: SharedString, - message: SharedString, - thread: &Entity, - cx: &mut Context, - ) -> AnyElement { - let message_with_header = format!("{}\n{}", header, message); - - let icon = Icon::new(IconName::XCircle) - .size(IconSize::Small) - .color(Color::Error); - - let retry_button = Button::new("retry", "Retry") + fn render_retry_button(&self, thread: &Entity) -> AnyElement { + Button::new("retry", "Retry") .icon(IconName::RotateCw) .icon_position(IconPosition::Start) .icon_size(IconSize::Small) @@ -2980,21 +3391,31 @@ impl AgentPanel { }); }); } - }); + }) + .into_any_element() + } - div() - .border_t_1() - .border_color(cx.theme().colors().border) - .child( - Callout::new() - .icon(icon) - .title(header) - .description(message.clone()) - .primary_action(retry_button) - .secondary_action(self.dismiss_error_button(thread, cx)) - .tertiary_action(self.create_copy_button(message_with_header)) - .bg_color(self.error_callout_bg(cx)), + fn render_error_message( + &self, + header: SharedString, + message: SharedString, + thread: &Entity, + cx: &mut Context, + ) -> AnyElement { + let message_with_header = format!("{}\n{}", header, message); + + Callout::new() + .severity(Severity::Error) + .icon(IconName::XCircle) + .title(header) + .description(message) + .actions_slot( + h_flex() + .gap_0p5() + .child(self.render_retry_button(thread)) + .child(self.create_copy_button(message_with_header)), ) + .dismiss_action(self.dismiss_error_button(thread, cx)) .into_any_element() } @@ -3003,60 +3424,39 @@ impl AgentPanel { message: SharedString, can_enable_burn_mode: bool, thread: &Entity, - cx: &mut Context, ) -> AnyElement { - let icon = Icon::new(IconName::XCircle) - .size(IconSize::Small) - 
.color(Color::Error); - - let retry_button = Button::new("retry", "Retry") - .icon(IconName::RotateCw) - .icon_position(IconPosition::Start) - .icon_size(IconSize::Small) - .label_size(LabelSize::Small) - .on_click({ - let thread = thread.clone(); - move |_, window, cx| { - thread.update(cx, |thread, cx| { - thread.clear_last_error(); - thread.thread().update(cx, |thread, cx| { - thread.retry_last_completion(Some(window.window_handle()), cx); - }); - }); - } - }); - - let mut callout = Callout::new() - .icon(icon) + Callout::new() + .severity(Severity::Error) .title("Error") - .description(message.clone()) - .bg_color(self.error_callout_bg(cx)) - .primary_action(retry_button); - - if can_enable_burn_mode { - let burn_mode_button = Button::new("enable_burn_retry", "Enable Burn Mode and Retry") - .icon(IconName::ZedBurnMode) - .icon_position(IconPosition::Start) - .icon_size(IconSize::Small) - .label_size(LabelSize::Small) - .on_click({ - let thread = thread.clone(); - move |_, window, cx| { - thread.update(cx, |thread, cx| { - thread.clear_last_error(); - thread.thread().update(cx, |thread, cx| { - thread.enable_burn_mode_and_retry(Some(window.window_handle()), cx); - }); - }); - } - }); - callout = callout.secondary_action(burn_mode_button); - } - - div() - .border_t_1() - .border_color(cx.theme().colors().border) - .child(callout) + .description(message) + .actions_slot( + h_flex() + .gap_0p5() + .when(can_enable_burn_mode, |this| { + this.child( + Button::new("enable_burn_retry", "Enable Burn Mode and Retry") + .icon(IconName::ZedBurnMode) + .icon_position(IconPosition::Start) + .icon_size(IconSize::Small) + .label_size(LabelSize::Small) + .on_click({ + let thread = thread.clone(); + move |_, window, cx| { + thread.update(cx, |thread, cx| { + thread.clear_last_error(); + thread.thread().update(cx, |thread, cx| { + thread.enable_burn_mode_and_retry( + Some(window.window_handle()), + cx, + ); + }); + }); + } + }), + ) + }) + .child(self.render_retry_button(thread)), + ) .into_any_element() } @@ -3135,9 +3535,9 @@ impl AgentPanel { .on_drop(cx.listener(move |this, paths: &ExternalPaths, window, cx| { let tasks = paths .paths() - .into_iter() + .iter() .map(|path| { - Workspace::project_path_for_path(this.project.clone(), &path, false, cx) + Workspace::project_path_for_path(this.project.clone(), path, false, cx) }) .collect::>(); cx.spawn_in(window, async move |this, cx| { @@ -3187,8 +3587,10 @@ impl AgentPanel { .detach(); }); } - ActiveView::ExternalAgentThread { .. } => { - unimplemented!() + ActiveView::ExternalAgentThread { thread_view } => { + thread_view.update(cx, |thread_view, cx| { + thread_view.insert_dragged_files(paths, added_worktrees, window, cx); + }); } ActiveView::TextThread { context_editor, .. } => { context_editor.update(cx, |context_editor, cx| { @@ -3309,7 +3711,6 @@ impl Render for AgentPanel { message, can_enable_burn_mode, thread, - cx, ), }) .into_any(), @@ -3323,7 +3724,13 @@ impl Render for AgentPanel { ActiveView::ExternalAgentThread { thread_view, .. 
} => parent .child(thread_view.clone()) .child(self.render_drag_target(cx)), - ActiveView::History => parent.child(self.history.clone()), + ActiveView::History => { + if cx.has_flag::() { + parent.child(self.acp_history.clone()) + } else { + parent.child(self.history.clone()) + } + } ActiveView::TextThread { context_editor, buffer_search_bar, @@ -3337,16 +3744,13 @@ impl Render for AgentPanel { if !self.should_render_onboarding(cx) && let Some(err) = configuration_error.as_ref() { - this.child( - div().bg(cx.theme().colors().editor_background).p_2().child( - self.render_configuration_error( - err, - &self.focus_handle(cx), - window, - cx, - ), - ), - ) + this.child(self.render_configuration_error( + true, + err, + &self.focus_handle(cx), + window, + cx, + )) } else { this } @@ -3405,7 +3809,7 @@ impl rules_library::InlineAssistDelegate for PromptLibraryInlineAssist { let text_thread_store = None; let context_store = cx.new(|_| ContextStore::new(project.clone(), None)); assistant.assist( - &prompt_editor, + prompt_editor, self.workspace.clone(), context_store, project, @@ -3488,7 +3892,11 @@ impl AgentPanelDelegate for ConcreteAssistantPanelDelegate { // Wait to create a new context until the workspace is no longer // being updated. cx.defer_in(window, move |panel, window, cx| { - if let Some(message_editor) = panel.active_message_editor() { + if let Some(thread_view) = panel.active_thread_view() { + thread_view.update(cx, |thread_view, cx| { + thread_view.insert_selections(window, cx); + }); + } else if let Some(message_editor) = panel.active_message_editor() { message_editor.update(cx, |message_editor, cx| { message_editor.context_store().update(cx, |store, cx| { let buffer = buffer.read(cx); diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index fceb8f4c45157bfc47285e65c69f685bcb156eee..6084fd64235b4c276c841889cb0516661c14b1f7 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -5,7 +5,6 @@ mod agent_diff; mod agent_model_selector; mod agent_panel; mod buffer_codegen; -mod burn_mode_tooltip; mod context_picker; mod context_server_configuration; mod context_strip; @@ -64,6 +63,8 @@ actions!( NewTextThread, /// Toggles the context picker interface for adding files, symbols, or other context. ToggleContextPicker, + /// Toggles the menu to create new agent threads. + ToggleNewThreadMenu, /// Toggles the navigation menu for switching between threads and views. ToggleNavigationMenu, /// Toggles the options menu for agent settings and preferences. @@ -127,6 +128,12 @@ actions!( ] ); +#[derive(Clone, Copy, Debug, PartialEq, Eq, Action)] +#[action(namespace = agent)] +#[action(deprecated_aliases = ["assistant::QuoteSelection"])] +/// Quotes the current selection in the agent panel's message editor. +pub struct QuoteSelection; + /// Creates a new conversation thread, optionally based on an existing thread. 
#[derive(Default, Clone, PartialEq, Deserialize, JsonSchema, Action)] #[action(namespace = agent)] @@ -145,7 +152,14 @@ pub struct NewExternalAgentThread { agent: Option, } -#[derive(Default, Clone, Copy, PartialEq, Serialize, Deserialize, JsonSchema)] +#[derive(Clone, PartialEq, Deserialize, JsonSchema, Action)] +#[action(namespace = agent)] +#[serde(deny_unknown_fields)] +pub struct NewNativeAgentThreadFromSummary { + from_session_id: agent_client_protocol::SessionId, +} + +#[derive(Default, Debug, Clone, Copy, PartialEq, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] enum ExternalAgent { #[default] @@ -155,11 +169,15 @@ enum ExternalAgent { } impl ExternalAgent { - pub fn server(&self) -> Rc { + pub fn server( + &self, + fs: Arc, + history: Entity, + ) -> Rc { match self { ExternalAgent::Gemini => Rc::new(agent_servers::Gemini), ExternalAgent::ClaudeCode => Rc::new(agent_servers::ClaudeCode), - ExternalAgent::NativeAgent => Rc::new(agent2::NativeAgentServer), + ExternalAgent::NativeAgent => Rc::new(agent2::NativeAgentServer::new(fs, history)), } } } @@ -235,13 +253,7 @@ pub fn init( client.telemetry().clone(), cx, ); - terminal_inline_assistant::init( - fs.clone(), - prompt_builder.clone(), - client.telemetry().clone(), - cx, - ); - indexed_docs::init(cx); + terminal_inline_assistant::init(fs.clone(), prompt_builder, client.telemetry().clone(), cx); cx.observe_new(move |workspace, window, cx| { ConfigureContextServerModal::register(workspace, language_registry.clone(), window, cx) }) @@ -320,7 +332,7 @@ fn init_language_model_settings(cx: &mut App) { cx.subscribe( &LanguageModelRegistry::global(cx), |_, event: &language_model::Event, cx| match event { - language_model::Event::ProviderStateChanged + language_model::Event::ProviderStateChanged(_) | language_model::Event::AddedProvider(_) | language_model::Event::RemovedProvider(_) => { update_active_language_model_from_settings(cx); @@ -387,7 +399,6 @@ fn register_slash_commands(cx: &mut App) { slash_command_registry.register_command(assistant_slash_commands::FetchSlashCommand, true); cx.observe_flag::({ - let slash_command_registry = slash_command_registry.clone(); move |is_enabled, _cx| { if is_enabled { slash_command_registry.register_command( @@ -408,12 +419,6 @@ fn update_slash_commands_from_settings(cx: &mut App) { let slash_command_registry = SlashCommandRegistry::global(cx); let settings = SlashCommandSettings::get_global(cx); - if settings.docs.enabled { - slash_command_registry.register_command(assistant_slash_commands::DocsSlashCommand, true); - } else { - slash_command_registry.unregister_command(assistant_slash_commands::DocsSlashCommand); - } - if settings.cargo_workspace.enabled { slash_command_registry .register_command(assistant_slash_commands::CargoWorkspaceSlashCommand, true); diff --git a/crates/agent_ui/src/buffer_codegen.rs b/crates/agent_ui/src/buffer_codegen.rs index 615142b73dfd6eed59f635af780310290e3f6f25..04eb41793f2257a9dccfdd089594d2f90d0ce513 100644 --- a/crates/agent_ui/src/buffer_codegen.rs +++ b/crates/agent_ui/src/buffer_codegen.rs @@ -352,12 +352,12 @@ impl CodegenAlternative { event: &multi_buffer::Event, cx: &mut Context, ) { - if let multi_buffer::Event::TransactionUndone { transaction_id } = event { - if self.transformation_transaction_id == Some(*transaction_id) { - self.transformation_transaction_id = None; - self.generation = Task::ready(()); - cx.emit(CodegenEvent::Undone); - } + if let multi_buffer::Event::TransactionUndone { transaction_id } = event + && 
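// `ExternalAgent::server` above now receives its dependencies (the filesystem handle
// and the history store) as arguments, and only the native agent actually holds onto
// them. A dependency-injected factory in miniature, with an invented trait and
// placeholder types standing in for the real ones:
use std::rc::Rc;
use std::sync::Arc;

// Stand-ins for Zed's filesystem abstraction and history entity.
struct Fs;
struct HistoryStore;

trait AgentServer {
    fn name(&self) -> &'static str;
}

struct Gemini;
struct ClaudeCode;
struct NativeAgentServer {
    _fs: Arc<Fs>,
    _history: Rc<HistoryStore>,
}

impl AgentServer for Gemini {
    fn name(&self) -> &'static str { "gemini" }
}
impl AgentServer for ClaudeCode {
    fn name(&self) -> &'static str { "claude-code" }
}
impl AgentServer for NativeAgentServer {
    fn name(&self) -> &'static str { "native" }
}

enum ExternalAgent {
    Gemini,
    ClaudeCode,
    NativeAgent,
}

impl ExternalAgent {
    fn server(&self, fs: Arc<Fs>, history: Rc<HistoryStore>) -> Rc<dyn AgentServer> {
        match self {
            ExternalAgent::Gemini => Rc::new(Gemini),
            ExternalAgent::ClaudeCode => Rc::new(ClaudeCode),
            // Only the native agent needs the injected dependencies.
            ExternalAgent::NativeAgent => Rc::new(NativeAgentServer {
                _fs: fs,
                _history: history,
            }),
        }
    }
}

fn main() {
    let fs = Arc::new(Fs);
    let history = Rc::new(HistoryStore);
    for agent in [
        ExternalAgent::Gemini,
        ExternalAgent::ClaudeCode,
        ExternalAgent::NativeAgent,
    ] {
        let server = agent.server(fs.clone(), history.clone());
        println!("agent server: {}", server.name());
    }
}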
self.transformation_transaction_id == Some(*transaction_id) + { + self.transformation_transaction_id = None; + self.generation = Task::ready(()); + cx.emit(CodegenEvent::Undone); } } @@ -388,7 +388,7 @@ impl CodegenAlternative { } else { let request = self.build_request(&model, user_prompt, cx)?; cx.spawn(async move |_, cx| { - Ok(model.stream_completion_text(request.await, &cx).await?) + Ok(model.stream_completion_text(request.await, cx).await?) }) .boxed_local() }; @@ -447,7 +447,7 @@ impl CodegenAlternative { } }); - let temperature = AgentSettings::temperature_for_model(&model, cx); + let temperature = AgentSettings::temperature_for_model(model, cx); Ok(cx.spawn(async move |_cx| { let mut request_message = LanguageModelRequestMessage { @@ -576,38 +576,34 @@ impl CodegenAlternative { let mut lines = chunk.split('\n').peekable(); while let Some(line) = lines.next() { new_text.push_str(line); - if line_indent.is_none() { - if let Some(non_whitespace_ch_ix) = + if line_indent.is_none() + && let Some(non_whitespace_ch_ix) = new_text.find(|ch: char| !ch.is_whitespace()) - { - line_indent = Some(non_whitespace_ch_ix); - base_indent = base_indent.or(line_indent); - - let line_indent = line_indent.unwrap(); - let base_indent = base_indent.unwrap(); - let indent_delta = - line_indent as i32 - base_indent as i32; - let mut corrected_indent_len = cmp::max( - 0, - suggested_line_indent.len as i32 + indent_delta, - ) - as usize; - if first_line { - corrected_indent_len = corrected_indent_len - .saturating_sub( - selection_start.column as usize, - ); - } - - let indent_char = suggested_line_indent.char(); - let mut indent_buffer = [0; 4]; - let indent_str = - indent_char.encode_utf8(&mut indent_buffer); - new_text.replace_range( - ..line_indent, - &indent_str.repeat(corrected_indent_len), - ); + { + line_indent = Some(non_whitespace_ch_ix); + base_indent = base_indent.or(line_indent); + + let line_indent = line_indent.unwrap(); + let base_indent = base_indent.unwrap(); + let indent_delta = line_indent as i32 - base_indent as i32; + let mut corrected_indent_len = cmp::max( + 0, + suggested_line_indent.len as i32 + indent_delta, + ) + as usize; + if first_line { + corrected_indent_len = corrected_indent_len + .saturating_sub(selection_start.column as usize); } + + let indent_char = suggested_line_indent.char(); + let mut indent_buffer = [0; 4]; + let indent_str = + indent_char.encode_utf8(&mut indent_buffer); + new_text.replace_range( + ..line_indent, + &indent_str.repeat(corrected_indent_len), + ); } if line_indent.is_some() { @@ -1028,7 +1024,7 @@ where chunk.push('\n'); } - chunk.push_str(&line); + chunk.push_str(line); } consumed += line.len(); @@ -1133,7 +1129,7 @@ mod tests { ) }); - let chunks_tx = simulate_response_stream(codegen.clone(), cx); + let chunks_tx = simulate_response_stream(&codegen, cx); let mut new_text = concat!( " let mut x = 0;\n", @@ -1200,7 +1196,7 @@ mod tests { ) }); - let chunks_tx = simulate_response_stream(codegen.clone(), cx); + let chunks_tx = simulate_response_stream(&codegen, cx); cx.background_executor.run_until_parked(); @@ -1269,7 +1265,7 @@ mod tests { ) }); - let chunks_tx = simulate_response_stream(codegen.clone(), cx); + let chunks_tx = simulate_response_stream(&codegen, cx); cx.background_executor.run_until_parked(); @@ -1338,7 +1334,7 @@ mod tests { ) }); - let chunks_tx = simulate_response_stream(codegen.clone(), cx); + let chunks_tx = simulate_response_stream(&codegen, cx); let new_text = concat!( "func main() {\n", "\tx := 0\n", @@ -1395,7 +1391,7 @@ mod 
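// The `buffer_codegen` hunks above collapse an `if let` nested inside an `if` into a
// single let-chain (`if let … = event && condition { … }`), which needs the 2024
// edition (Rust 1.88+). A before/after in miniature, mirroring the TransactionUndone
// case with simplified types:
#[derive(Clone, Copy, PartialEq)]
struct TransactionId(u32);

enum Event {
    TransactionUndone { transaction_id: TransactionId },
    Edited,
}

fn handle_nested(event: &Event, tracked: Option<TransactionId>) -> bool {
    // Pre-refactor shape: two levels of nesting.
    if let Event::TransactionUndone { transaction_id } = event {
        if tracked == Some(*transaction_id) {
            return true;
        }
    }
    false
}

fn handle_let_chain(event: &Event, tracked: Option<TransactionId>) -> bool {
    // Post-refactor shape: one condition, one body, identical behavior.
    if let Event::TransactionUndone { transaction_id } = event
        && tracked == Some(*transaction_id)
    {
        return true;
    }
    false
}

fn main() {
    let id = TransactionId(7);
    let undo = Event::TransactionUndone { transaction_id: id };
    assert_eq!(handle_nested(&undo, Some(id)), handle_let_chain(&undo, Some(id)));
    assert_eq!(
        handle_nested(&Event::Edited, Some(id)),
        handle_let_chain(&Event::Edited, Some(id)),
    );
    assert!(!handle_let_chain(&undo, None));
}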
tests { ) }); - let chunks_tx = simulate_response_stream(codegen.clone(), cx); + let chunks_tx = simulate_response_stream(&codegen, cx); chunks_tx .unbounded_send("let mut x = 0;\nx += 1;".to_string()) .unwrap(); @@ -1477,7 +1473,7 @@ mod tests { } fn simulate_response_stream( - codegen: Entity, + codegen: &Entity, cx: &mut TestAppContext, ) -> mpsc::UnboundedSender { let (chunks_tx, chunks_rx) = mpsc::unbounded(); diff --git a/crates/agent_ui/src/burn_mode_tooltip.rs b/crates/agent_ui/src/burn_mode_tooltip.rs deleted file mode 100644 index 6354c07760f5aa0261b69e8dd08ce1f1b1be6023..0000000000000000000000000000000000000000 --- a/crates/agent_ui/src/burn_mode_tooltip.rs +++ /dev/null @@ -1,61 +0,0 @@ -use gpui::{Context, FontWeight, IntoElement, Render, Window}; -use ui::{prelude::*, tooltip_container}; - -pub struct BurnModeTooltip { - selected: bool, -} - -impl BurnModeTooltip { - pub fn new() -> Self { - Self { selected: false } - } - - pub fn selected(mut self, selected: bool) -> Self { - self.selected = selected; - self - } -} - -impl Render for BurnModeTooltip { - fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { - let (icon, color) = if self.selected { - (IconName::ZedBurnModeOn, Color::Error) - } else { - (IconName::ZedBurnMode, Color::Default) - }; - - let turned_on = h_flex() - .h_4() - .px_1() - .border_1() - .border_color(cx.theme().colors().border) - .bg(cx.theme().colors().text_accent.opacity(0.1)) - .rounded_sm() - .child( - Label::new("ON") - .size(LabelSize::XSmall) - .weight(FontWeight::SEMIBOLD) - .color(Color::Accent), - ); - - let title = h_flex() - .gap_1p5() - .child(Icon::new(icon).size(IconSize::Small).color(color)) - .child(Label::new("Burn Mode")) - .when(self.selected, |title| title.child(turned_on)); - - tooltip_container(window, cx, |this, _, _| { - this - .child(title) - .child( - div() - .max_w_64() - .child( - Label::new("Enables models to use large context windows, unlimited tool calls, and other capabilities for expanded reasoning.") - .size(LabelSize::Small) - .color(Color::Muted) - ) - ) - }) - } -} diff --git a/crates/agent_ui/src/context_picker.rs b/crates/agent_ui/src/context_picker.rs index 32f9a096d9bf39e251545e3c82d3d4ff77ccb17e..405b5ed90ba1606ef97b8b048b959bfc354bc5cd 100644 --- a/crates/agent_ui/src/context_picker.rs +++ b/crates/agent_ui/src/context_picker.rs @@ -1,18 +1,19 @@ mod completion_provider; -mod fetch_context_picker; +pub(crate) mod fetch_context_picker; pub(crate) mod file_context_picker; -mod rules_context_picker; -mod symbol_context_picker; -mod thread_context_picker; +pub(crate) mod rules_context_picker; +pub(crate) mod symbol_context_picker; +pub(crate) mod thread_context_picker; use std::ops::Range; use std::path::{Path, PathBuf}; use std::sync::Arc; use anyhow::{Result, anyhow}; +use collections::HashSet; pub use completion_provider::ContextPickerCompletionProvider; use editor::display_map::{Crease, CreaseId, CreaseMetadata, FoldId}; -use editor::{Anchor, AnchorRangeExt as _, Editor, ExcerptId, FoldPlaceholder, ToOffset}; +use editor::{Anchor, Editor, ExcerptId, FoldPlaceholder, ToOffset}; use fetch_context_picker::FetchContextPicker; use file_context_picker::FileContextPicker; use file_context_picker::render_file_context_entry; @@ -45,7 +46,7 @@ use agent::{ }; #[derive(Debug, Clone, Copy, PartialEq, Eq)] -enum ContextPickerEntry { +pub(crate) enum ContextPickerEntry { Mode(ContextPickerMode), Action(ContextPickerAction), } @@ -74,7 +75,7 @@ impl ContextPickerEntry { } #[derive(Debug, Clone, 
Copy, PartialEq, Eq)] -enum ContextPickerMode { +pub(crate) enum ContextPickerMode { File, Symbol, Fetch, @@ -83,7 +84,7 @@ enum ContextPickerMode { } #[derive(Debug, Clone, Copy, PartialEq, Eq)] -enum ContextPickerAction { +pub(crate) enum ContextPickerAction { AddSelections, } @@ -102,7 +103,7 @@ impl ContextPickerAction { pub fn icon(&self) -> IconName { match self { - Self::AddSelections => IconName::Context, + Self::AddSelections => IconName::Reader, } } } @@ -147,7 +148,7 @@ impl ContextPickerMode { match self { Self::File => IconName::File, Self::Symbol => IconName::Code, - Self::Fetch => IconName::Globe, + Self::Fetch => IconName::ToolWeb, Self::Thread => IconName::Thread, Self::Rules => RULES_ICON, } @@ -227,7 +228,7 @@ impl ContextPicker { } fn build_menu(&mut self, window: &mut Window, cx: &mut Context) -> Entity { - let context_picker = cx.entity().clone(); + let context_picker = cx.entity(); let menu = ContextMenu::build(window, cx, move |menu, _window, cx| { let recent = self.recent_entries(cx); @@ -384,12 +385,11 @@ impl ContextPicker { } pub fn select_first(&mut self, window: &mut Window, cx: &mut Context) { - match &self.mode { - ContextPickerState::Default(entity) => entity.update(cx, |entity, cx| { + // Other variants already select their first entry on open automatically + if let ContextPickerState::Default(entity) = &self.mode { + entity.update(cx, |entity, cx| { entity.select_first(&Default::default(), window, cx) - }), - // Other variants already select their first entry on open automatically - _ => {} + }) } } @@ -531,7 +531,7 @@ impl ContextPicker { return vec![]; }; - recent_context_picker_entries( + recent_context_picker_entries_with_store( context_store, self.thread_store.clone(), self.text_thread_store.clone(), @@ -585,7 +585,8 @@ impl Render for ContextPicker { }) } } -enum RecentEntry { + +pub(crate) enum RecentEntry { File { project_path: ProjectPath, path_prefix: Arc, @@ -593,7 +594,7 @@ enum RecentEntry { Thread(ThreadContextEntry), } -fn available_context_picker_entries( +pub(crate) fn available_context_picker_entries( prompt_store: &Option>, thread_store: &Option>, workspace: &Entity, @@ -608,9 +609,7 @@ fn available_context_picker_entries( .read(cx) .active_item(cx) .and_then(|item| item.downcast::()) - .map_or(false, |editor| { - editor.update(cx, |editor, cx| editor.has_non_empty_selection(cx)) - }); + .is_some_and(|editor| editor.update(cx, |editor, cx| editor.has_non_empty_selection(cx))); if has_selection { entries.push(ContextPickerEntry::Action( ContextPickerAction::AddSelections, @@ -630,24 +629,56 @@ fn available_context_picker_entries( entries } -fn recent_context_picker_entries( +fn recent_context_picker_entries_with_store( context_store: Entity, thread_store: Option>, text_thread_store: Option>, workspace: Entity, exclude_path: Option, cx: &App, +) -> Vec { + let project = workspace.read(cx).project(); + + let mut exclude_paths = context_store.read(cx).file_paths(cx); + exclude_paths.extend(exclude_path); + + let exclude_paths = exclude_paths + .into_iter() + .filter_map(|project_path| project.read(cx).absolute_path(&project_path, cx)) + .collect(); + + let exclude_threads = context_store.read(cx).thread_ids(); + + recent_context_picker_entries( + thread_store, + text_thread_store, + workspace, + &exclude_paths, + exclude_threads, + cx, + ) +} + +pub(crate) fn recent_context_picker_entries( + thread_store: Option>, + text_thread_store: Option>, + workspace: Entity, + exclude_paths: &HashSet, + exclude_threads: &HashSet, + cx: &App, ) -> Vec 
{ let mut recent = Vec::with_capacity(6); - let mut current_files = context_store.read(cx).file_paths(cx); - current_files.extend(exclude_path); let workspace = workspace.read(cx); let project = workspace.project().read(cx); recent.extend( workspace .recent_navigation_history_iter(cx) - .filter(|(path, _)| !current_files.contains(path)) + .filter(|(_, abs_path)| { + abs_path + .as_ref() + .is_none_or(|path| !exclude_paths.contains(path.as_path())) + }) .take(4) .filter_map(|(project_path, _)| { project @@ -659,8 +690,6 @@ fn recent_context_picker_entries( }), ); - let current_threads = context_store.read(cx).thread_ids(); - let active_thread_id = workspace .panel::(cx) .and_then(|panel| Some(panel.read(cx).active_thread(cx)?.read(cx).id())); @@ -672,7 +701,7 @@ fn recent_context_picker_entries( let mut threads = unordered_thread_entries(thread_store, text_thread_store, cx) .filter(|(_, thread)| match thread { ThreadContextEntry::Thread { id, .. } => { - Some(id) != active_thread_id && !current_threads.contains(id) + Some(id) != active_thread_id && !exclude_threads.contains(id) } ThreadContextEntry::Context { .. } => true, }) @@ -710,7 +739,7 @@ fn add_selections_as_context( }) } -fn selection_ranges( +pub(crate) fn selection_ranges( workspace: &Entity, cx: &mut App, ) -> Vec<(Entity, Range)> { @@ -789,13 +818,8 @@ pub fn crease_for_mention( let render_trailer = move |_row, _unfold, _window: &mut Window, _cx: &mut App| Empty.into_any(); - Crease::inline( - range, - placeholder.clone(), - fold_toggle("mention"), - render_trailer, - ) - .with_metadata(CreaseMetadata { icon_path, label }) + Crease::inline(range, placeholder, fold_toggle("mention"), render_trailer) + .with_metadata(CreaseMetadata { icon_path, label }) } fn render_fold_icon_button( @@ -805,42 +829,9 @@ fn render_fold_icon_button( ) -> Arc, &mut App) -> AnyElement> { Arc::new({ move |fold_id, fold_range, cx| { - let is_in_text_selection = editor.upgrade().is_some_and(|editor| { - editor.update(cx, |editor, cx| { - let snapshot = editor - .buffer() - .update(cx, |multi_buffer, cx| multi_buffer.snapshot(cx)); - - let is_in_pending_selection = || { - editor - .selections - .pending - .as_ref() - .is_some_and(|pending_selection| { - pending_selection - .selection - .range() - .includes(&fold_range, &snapshot) - }) - }; - - let mut is_in_complete_selection = || { - editor - .selections - .disjoint_in_range::(fold_range.clone(), cx) - .into_iter() - .any(|selection| { - // This is needed to cover a corner case, if we just check for an existing - // selection in the fold range, having a cursor at the start of the fold - // marks it as selected. Non-empty selections don't cause this. 
- let length = selection.end - selection.start; - length > 0 - }) - }; - - is_in_pending_selection() || is_in_complete_selection() - }) - }); + let is_in_text_selection = editor + .update(cx, |editor, cx| editor.is_range_selected(&fold_range, cx)) + .unwrap_or_default(); ButtonLike::new(fold_id) .style(ButtonStyle::Filled) diff --git a/crates/agent_ui/src/context_picker/completion_provider.rs b/crates/agent_ui/src/context_picker/completion_provider.rs index 5ca0913be7b923136298590861a8ca13107952af..020d799c799f4184494fc3d9ec6a0ef8119a9897 100644 --- a/crates/agent_ui/src/context_picker/completion_provider.rs +++ b/crates/agent_ui/src/context_picker/completion_provider.rs @@ -35,7 +35,7 @@ use super::symbol_context_picker::search_symbols; use super::thread_context_picker::{ThreadContextEntry, ThreadMatch, search_threads}; use super::{ ContextPickerAction, ContextPickerEntry, ContextPickerMode, MentionLink, RecentEntry, - available_context_picker_entries, recent_context_picker_entries, selection_ranges, + available_context_picker_entries, recent_context_picker_entries_with_store, selection_ranges, }; use crate::message_editor::ContextCreasesAddon; @@ -79,8 +79,7 @@ fn search( ) -> Task> { match mode { Some(ContextPickerMode::File) => { - let search_files_task = - search_files(query.clone(), cancellation_flag.clone(), &workspace, cx); + let search_files_task = search_files(query, cancellation_flag, &workspace, cx); cx.background_spawn(async move { search_files_task .await @@ -91,8 +90,7 @@ fn search( } Some(ContextPickerMode::Symbol) => { - let search_symbols_task = - search_symbols(query.clone(), cancellation_flag.clone(), &workspace, cx); + let search_symbols_task = search_symbols(query, cancellation_flag, &workspace, cx); cx.background_spawn(async move { search_symbols_task .await @@ -108,13 +106,8 @@ fn search( .and_then(|t| t.upgrade()) .zip(text_thread_context_store.as_ref().and_then(|t| t.upgrade())) { - let search_threads_task = search_threads( - query.clone(), - cancellation_flag.clone(), - thread_store, - context_store, - cx, - ); + let search_threads_task = + search_threads(query, cancellation_flag, thread_store, context_store, cx); cx.background_spawn(async move { search_threads_task .await @@ -137,8 +130,7 @@ fn search( Some(ContextPickerMode::Rules) => { if let Some(prompt_store) = prompt_store.as_ref() { - let search_rules_task = - search_rules(query.clone(), cancellation_flag.clone(), prompt_store, cx); + let search_rules_task = search_rules(query, cancellation_flag, prompt_store, cx); cx.background_spawn(async move { search_rules_task .await @@ -196,7 +188,7 @@ fn search( let executor = cx.background_executor().clone(); let search_files_task = - search_files(query.clone(), cancellation_flag.clone(), &workspace, cx); + search_files(query.clone(), cancellation_flag, &workspace, cx); let entries = available_context_picker_entries(&prompt_store, &thread_store, &workspace, cx); @@ -283,7 +275,7 @@ impl ContextPickerCompletionProvider { ) -> Option { match entry { ContextPickerEntry::Mode(mode) => Some(Completion { - replace_range: source_range.clone(), + replace_range: source_range, new_text: format!("@{} ", mode.keyword()), label: CodeLabel::plain(mode.label().to_string(), None), icon_path: Some(mode.icon().path().into()), @@ -330,9 +322,6 @@ impl ContextPickerCompletionProvider { ); let callback = Arc::new({ - let context_store = context_store.clone(); - let selections = selections.clone(); - let selection_infos = selection_infos.clone(); move |_, window: &mut Window, cx: &mut 
App| { context_store.update(cx, |context_store, cx| { for (buffer, range) in &selections { @@ -371,7 +360,7 @@ impl ContextPickerCompletionProvider { line_range.end.row + 1 ) .into(), - IconName::Context.path().into(), + IconName::Reader.path().into(), range, editor.downgrade(), ); @@ -441,7 +430,7 @@ impl ContextPickerCompletionProvider { excerpt_id, source_range.start, new_text_len - 1, - editor.clone(), + editor, context_store.clone(), move |window, cx| match &thread_entry { ThreadContextEntry::Thread { id, .. } => { @@ -510,7 +499,7 @@ impl ContextPickerCompletionProvider { excerpt_id, source_range.start, new_text_len - 1, - editor.clone(), + editor, context_store.clone(), move |_, cx| { let user_prompt_id = rules.prompt_id; @@ -539,15 +528,15 @@ impl ContextPickerCompletionProvider { label: CodeLabel::plain(url_to_fetch.to_string(), None), documentation: None, source: project::CompletionSource::Custom, - icon_path: Some(IconName::Globe.path().into()), + icon_path: Some(IconName::ToolWeb.path().into()), insert_text_mode: None, confirm: Some(confirm_completion_callback( - IconName::Globe.path().into(), + IconName::ToolWeb.path().into(), url_to_fetch.clone(), excerpt_id, source_range.start, new_text_len - 1, - editor.clone(), + editor, context_store.clone(), move |_, cx| { let context_store = context_store.clone(); @@ -704,16 +693,16 @@ impl ContextPickerCompletionProvider { excerpt_id, source_range.start, new_text_len - 1, - editor.clone(), + editor, context_store.clone(), move |_, cx| { let symbol = symbol.clone(); let context_store = context_store.clone(); let workspace = workspace.clone(); let result = super::symbol_context_picker::add_symbol( - symbol.clone(), + symbol, false, - workspace.clone(), + workspace, context_store.downgrade(), cx, ); @@ -728,11 +717,11 @@ fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx: let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId); let mut label = CodeLabel::default(); - label.push_str(&file_name, None); + label.push_str(file_name, None); label.push_str(" ", None); if let Some(directory) = directory { - label.push_str(&directory, comment_id); + label.push_str(directory, comment_id); } label.filter_range = 0..label.text().len(); @@ -787,7 +776,7 @@ impl CompletionProvider for ContextPickerCompletionProvider { .and_then(|b| b.read(cx).file()) .map(|file| ProjectPath::from_file(file.as_ref(), cx)); - let recent_entries = recent_context_picker_entries( + let recent_entries = recent_context_picker_entries_with_store( context_store.clone(), thread_store.clone(), text_thread_store.clone(), @@ -1020,7 +1009,7 @@ impl MentionCompletion { && line .chars() .nth(last_mention_start - 1) - .map_or(false, |c| !c.is_whitespace()) + .is_some_and(|c| !c.is_whitespace()) { return None; } @@ -1162,7 +1151,7 @@ mod tests { impl Focusable for AtMentionEditor { fn focus_handle(&self, cx: &App) -> FocusHandle { - self.0.read(cx).focus_handle(cx).clone() + self.0.read(cx).focus_handle(cx) } } @@ -1480,7 +1469,7 @@ mod tests { let completions = editor.current_completions().expect("Missing completions"); completions .into_iter() - .map(|completion| completion.label.text.to_string()) + .map(|completion| completion.label.text) .collect::>() } diff --git a/crates/agent_ui/src/context_picker/fetch_context_picker.rs b/crates/agent_ui/src/context_picker/fetch_context_picker.rs index 8ff68a8365ee01ac79d707abf00197bf5175e43a..dd558b2a1c88f60e68313b208b076a0974b30f85 100644 --- 
a/crates/agent_ui/src/context_picker/fetch_context_picker.rs +++ b/crates/agent_ui/src/context_picker/fetch_context_picker.rs @@ -226,9 +226,10 @@ impl PickerDelegate for FetchContextPickerDelegate { _window: &mut Window, cx: &mut Context>, ) -> Option { - let added = self.context_store.upgrade().map_or(false, |context_store| { - context_store.read(cx).includes_url(&self.url) - }); + let added = self + .context_store + .upgrade() + .is_some_and(|context_store| context_store.read(cx).includes_url(&self.url)); Some( ListItem::new(ix) diff --git a/crates/agent_ui/src/context_picker/file_context_picker.rs b/crates/agent_ui/src/context_picker/file_context_picker.rs index eaf9ed16d6fc7a09854d9f0160d87e23f3c5ffd8..6c224caf4c114c389991907fd7e382fb7e4e49d1 100644 --- a/crates/agent_ui/src/context_picker/file_context_picker.rs +++ b/crates/agent_ui/src/context_picker/file_context_picker.rs @@ -239,9 +239,7 @@ pub(crate) fn search_files( PathMatchCandidateSet { snapshot: worktree.snapshot(), - include_ignored: worktree - .root_entry() - .map_or(false, |entry| entry.is_ignored), + include_ignored: worktree.root_entry().is_some_and(|entry| entry.is_ignored), include_root_name: true, candidates: project::Candidates::Entries, } @@ -315,7 +313,7 @@ pub fn render_file_context_entry( context_store: WeakEntity, cx: &App, ) -> Stateful
{ - let (file_name, directory) = extract_file_name_and_directory(&path, path_prefix); + let (file_name, directory) = extract_file_name_and_directory(path, path_prefix); let added = context_store.upgrade().and_then(|context_store| { let project_path = ProjectPath { @@ -334,7 +332,7 @@ pub fn render_file_context_entry( let file_icon = if is_directory { FileIcons::get_folder_icon(false, cx) } else { - FileIcons::get_icon(&path, cx) + FileIcons::get_icon(path, cx) } .map(Icon::from_path) .unwrap_or_else(|| Icon::new(IconName::File)); diff --git a/crates/agent_ui/src/context_picker/rules_context_picker.rs b/crates/agent_ui/src/context_picker/rules_context_picker.rs index 8ce821cfaaab0a49f4af70fca13c1ed202de20a1..f3982f61cb9822a238dc6573e81430de98c78838 100644 --- a/crates/agent_ui/src/context_picker/rules_context_picker.rs +++ b/crates/agent_ui/src/context_picker/rules_context_picker.rs @@ -159,7 +159,7 @@ pub fn render_thread_context_entry( context_store: WeakEntity, cx: &mut App, ) -> Div { - let added = context_store.upgrade().map_or(false, |context_store| { + let added = context_store.upgrade().is_some_and(|context_store| { context_store .read(cx) .includes_user_rules(user_rules.prompt_id) diff --git a/crates/agent_ui/src/context_picker/symbol_context_picker.rs b/crates/agent_ui/src/context_picker/symbol_context_picker.rs index 05e77deece6117d250d6efedbd9d24c6716b757e..b00d4e3693d2aff12c54c5ebda9acf93c74bfc7d 100644 --- a/crates/agent_ui/src/context_picker/symbol_context_picker.rs +++ b/crates/agent_ui/src/context_picker/symbol_context_picker.rs @@ -289,12 +289,12 @@ pub(crate) fn search_symbols( .iter() .enumerate() .map(|(id, symbol)| { - StringMatchCandidate::new(id, &symbol.label.filter_text()) + StringMatchCandidate::new(id, symbol.label.filter_text()) }) .partition(|candidate| { project .entry_for_path(&symbols[candidate.id].path, cx) - .map_or(false, |e| !e.is_ignored) + .is_some_and(|e| !e.is_ignored) }) }) .log_err() diff --git a/crates/agent_ui/src/context_picker/thread_context_picker.rs b/crates/agent_ui/src/context_picker/thread_context_picker.rs index 15cc731f8f2b7c82885c566273bc1cda9f3c156a..66654f3d8c066701a9f84df4d53bd9f1d9c30d3b 100644 --- a/crates/agent_ui/src/context_picker/thread_context_picker.rs +++ b/crates/agent_ui/src/context_picker/thread_context_picker.rs @@ -167,7 +167,7 @@ impl PickerDelegate for ThreadContextPickerDelegate { return; }; let open_thread_task = - thread_store.update(cx, |this, cx| this.open_thread(&id, window, cx)); + thread_store.update(cx, |this, cx| this.open_thread(id, window, cx)); cx.spawn(async move |this, cx| { let thread = open_thread_task.await?; @@ -236,12 +236,10 @@ pub fn render_thread_context_entry( let is_added = match entry { ThreadContextEntry::Thread { id, .. } => context_store .upgrade() - .map_or(false, |ctx_store| ctx_store.read(cx).includes_thread(&id)), - ThreadContextEntry::Context { path, .. } => { - context_store.upgrade().map_or(false, |ctx_store| { - ctx_store.read(cx).includes_text_thread(path) - }) - } + .is_some_and(|ctx_store| ctx_store.read(cx).includes_thread(id)), + ThreadContextEntry::Context { path, .. 
} => context_store + .upgrade() + .is_some_and(|ctx_store| ctx_store.read(cx).includes_text_thread(path)), }; h_flex() @@ -338,7 +336,7 @@ pub(crate) fn search_threads( let candidates = threads .iter() .enumerate() - .map(|(id, (_, thread))| StringMatchCandidate::new(id, &thread.title())) + .map(|(id, (_, thread))| StringMatchCandidate::new(id, thread.title())) .collect::>(); let matches = fuzzy::match_strings( &candidates, diff --git a/crates/agent_ui/src/context_strip.rs b/crates/agent_ui/src/context_strip.rs index 369964f165dc4d4460fd446c949538ec820fb82e..d25d7d35443e6ca7c28bb0894f72c0063f500721 100644 --- a/crates/agent_ui/src/context_strip.rs +++ b/crates/agent_ui/src/context_strip.rs @@ -145,7 +145,7 @@ impl ContextStrip { } let file_name = active_buffer.file()?.file_name(cx); - let icon_path = FileIcons::get_icon(&Path::new(&file_name), cx); + let icon_path = FileIcons::get_icon(Path::new(&file_name), cx); Some(SuggestedContext::File { name: file_name.to_string_lossy().into_owned().into(), buffer: active_buffer_entity.downgrade(), @@ -368,16 +368,16 @@ impl ContextStrip { _window: &mut Window, cx: &mut Context, ) { - if let Some(suggested) = self.suggested_context(cx) { - if self.is_suggested_focused(&self.added_contexts(cx)) { - self.add_suggested_context(&suggested, cx); - } + if let Some(suggested) = self.suggested_context(cx) + && self.is_suggested_focused(&self.added_contexts(cx)) + { + self.add_suggested_context(&suggested, cx); } } fn add_suggested_context(&mut self, suggested: &SuggestedContext, cx: &mut Context) { self.context_store.update(cx, |context_store, cx| { - context_store.add_suggested_context(&suggested, cx) + context_store.add_suggested_context(suggested, cx) }); cx.notify(); } diff --git a/crates/agent_ui/src/inline_assistant.rs b/crates/agent_ui/src/inline_assistant.rs index 4a4a747899ecc310666685de336bacffc4b271e6..13f1234b4de0aec6f7540c0f34950b8b5c3ef585 100644 --- a/crates/agent_ui/src/inline_assistant.rs +++ b/crates/agent_ui/src/inline_assistant.rs @@ -72,7 +72,7 @@ pub fn init( let Some(window) = window else { return; }; - let workspace = cx.entity().clone(); + let workspace = cx.entity(); InlineAssistant::update_global(cx, |inline_assistant, cx| { inline_assistant.register_workspace(&workspace, window, cx) }); @@ -182,13 +182,13 @@ impl InlineAssistant { match event { workspace::Event::UserSavedItem { item, .. } => { // When the user manually saves an editor, automatically accepts all finished transformations. 
- if let Some(editor) = item.upgrade().and_then(|item| item.act_as::(cx)) { - if let Some(editor_assists) = self.assists_by_editor.get(&editor.downgrade()) { - for assist_id in editor_assists.assist_ids.clone() { - let assist = &self.assists[&assist_id]; - if let CodegenStatus::Done = assist.codegen.read(cx).status(cx) { - self.finish_assist(assist_id, false, window, cx) - } + if let Some(editor) = item.upgrade().and_then(|item| item.act_as::(cx)) + && let Some(editor_assists) = self.assists_by_editor.get(&editor.downgrade()) + { + for assist_id in editor_assists.assist_ids.clone() { + let assist = &self.assists[&assist_id]; + if let CodegenStatus::Done = assist.codegen.read(cx).status(cx) { + self.finish_assist(assist_id, false, window, cx) } } } @@ -342,13 +342,11 @@ impl InlineAssistant { ) .await .ok(); - if let Some(answer) = answer { - if answer == 0 { - cx.update(|window, cx| { - window.dispatch_action(Box::new(OpenSettings), cx) - }) + if let Some(answer) = answer + && answer == 0 + { + cx.update(|window, cx| window.dispatch_action(Box::new(OpenSettings), cx)) .ok(); - } } anyhow::Ok(()) }) @@ -435,11 +433,11 @@ impl InlineAssistant { } } - if let Some(prev_selection) = selections.last_mut() { - if selection.start <= prev_selection.end { - prev_selection.end = selection.end; - continue; - } + if let Some(prev_selection) = selections.last_mut() + && selection.start <= prev_selection.end + { + prev_selection.end = selection.end; + continue; } let latest_selection = newest_selection.get_or_insert_with(|| selection.clone()); @@ -526,9 +524,9 @@ impl InlineAssistant { if assist_to_focus.is_none() { let focus_assist = if newest_selection.reversed { - range.start.to_point(&snapshot) == newest_selection.start + range.start.to_point(snapshot) == newest_selection.start } else { - range.end.to_point(&snapshot) == newest_selection.end + range.end.to_point(snapshot) == newest_selection.end }; if focus_assist { assist_to_focus = Some(assist_id); @@ -550,7 +548,7 @@ impl InlineAssistant { let editor_assists = self .assists_by_editor .entry(editor.downgrade()) - .or_insert_with(|| EditorInlineAssists::new(&editor, window, cx)); + .or_insert_with(|| EditorInlineAssists::new(editor, window, cx)); let mut assist_group = InlineAssistGroup::new(); for (assist_id, range, prompt_editor, prompt_block_id, end_block_id) in assists { let codegen = prompt_editor.read(cx).codegen().clone(); @@ -649,7 +647,7 @@ impl InlineAssistant { let editor_assists = self .assists_by_editor .entry(editor.downgrade()) - .or_insert_with(|| EditorInlineAssists::new(&editor, window, cx)); + .or_insert_with(|| EditorInlineAssists::new(editor, window, cx)); let mut assist_group = InlineAssistGroup::new(); self.assists.insert( @@ -985,14 +983,13 @@ impl InlineAssistant { EditorEvent::SelectionsChanged { .. 
} => { for assist_id in editor_assists.assist_ids.clone() { let assist = &self.assists[&assist_id]; - if let Some(decorations) = assist.decorations.as_ref() { - if decorations + if let Some(decorations) = assist.decorations.as_ref() + && decorations .prompt_editor .focus_handle(cx) .is_focused(window) - { - return; - } + { + return; } } @@ -1123,7 +1120,7 @@ impl InlineAssistant { if editor_assists .scroll_lock .as_ref() - .map_or(false, |lock| lock.assist_id == assist_id) + .is_some_and(|lock| lock.assist_id == assist_id) { editor_assists.scroll_lock = None; } @@ -1503,20 +1500,18 @@ impl InlineAssistant { window: &mut Window, cx: &mut App, ) -> Option { - if let Some(terminal_panel) = workspace.panel::(cx) { - if terminal_panel + if let Some(terminal_panel) = workspace.panel::(cx) + && terminal_panel .read(cx) .focus_handle(cx) .contains_focused(window, cx) - { - if let Some(terminal_view) = terminal_panel.read(cx).pane().and_then(|pane| { - pane.read(cx) - .active_item() - .and_then(|t| t.downcast::()) - }) { - return Some(InlineAssistTarget::Terminal(terminal_view)); - } - } + && let Some(terminal_view) = terminal_panel.read(cx).pane().and_then(|pane| { + pane.read(cx) + .active_item() + .and_then(|t| t.downcast::()) + }) + { + return Some(InlineAssistTarget::Terminal(terminal_view)); } let context_editor = agent_panel @@ -1537,13 +1532,11 @@ impl InlineAssistant { .and_then(|item| item.act_as::(cx)) { Some(InlineAssistTarget::Editor(workspace_editor)) - } else if let Some(terminal_view) = workspace - .active_item(cx) - .and_then(|item| item.act_as::(cx)) - { - Some(InlineAssistTarget::Terminal(terminal_view)) } else { - None + workspace + .active_item(cx) + .and_then(|item| item.act_as::(cx)) + .map(InlineAssistTarget::Terminal) } } } @@ -1698,7 +1691,7 @@ impl InlineAssist { }), range, codegen: codegen.clone(), - workspace: workspace.clone(), + workspace, _subscriptions: vec![ window.on_focus_in(&prompt_editor_focus_handle, cx, move |_, cx| { InlineAssistant::update_global(cx, |this, cx| { @@ -1741,22 +1734,20 @@ impl InlineAssist { return; }; - if let CodegenStatus::Error(error) = codegen.read(cx).status(cx) { - if assist.decorations.is_none() { - if let Some(workspace) = assist.workspace.upgrade() { - let error = format!("Inline assistant error: {}", error); - workspace.update(cx, |workspace, cx| { - struct InlineAssistantError; - - let id = - NotificationId::composite::( - assist_id.0, - ); - - workspace.show_toast(Toast::new(id, error), cx); - }) - } - } + if let CodegenStatus::Error(error) = codegen.read(cx).status(cx) + && assist.decorations.is_none() + && let Some(workspace) = assist.workspace.upgrade() + { + let error = format!("Inline assistant error: {}", error); + workspace.update(cx, |workspace, cx| { + struct InlineAssistantError; + + let id = NotificationId::composite::( + assist_id.0, + ); + + workspace.show_toast(Toast::new(id, error), cx); + }) } if assist.decorations.is_none() { @@ -1821,18 +1812,18 @@ impl CodeActionProvider for AssistantCodeActionProvider { has_diagnostics = true; } if has_diagnostics { - if let Some(symbols_containing_start) = snapshot.symbols_containing(range.start, None) { - if let Some(symbol) = symbols_containing_start.last() { - range.start = cmp::min(range.start, symbol.range.start.to_point(&snapshot)); - range.end = cmp::max(range.end, symbol.range.end.to_point(&snapshot)); - } + if let Some(symbols_containing_start) = snapshot.symbols_containing(range.start, None) + && let Some(symbol) = symbols_containing_start.last() + { + range.start = 
cmp::min(range.start, symbol.range.start.to_point(&snapshot)); + range.end = cmp::max(range.end, symbol.range.end.to_point(&snapshot)); } - if let Some(symbols_containing_end) = snapshot.symbols_containing(range.end, None) { - if let Some(symbol) = symbols_containing_end.last() { - range.start = cmp::min(range.start, symbol.range.start.to_point(&snapshot)); - range.end = cmp::max(range.end, symbol.range.end.to_point(&snapshot)); - } + if let Some(symbols_containing_end) = snapshot.symbols_containing(range.end, None) + && let Some(symbol) = symbols_containing_end.last() + { + range.start = cmp::min(range.start, symbol.range.start.to_point(&snapshot)); + range.end = cmp::max(range.end, symbol.range.end.to_point(&snapshot)); } Task::ready(Ok(vec![CodeAction { diff --git a/crates/agent_ui/src/inline_prompt_editor.rs b/crates/agent_ui/src/inline_prompt_editor.rs index a5f90edb57ca9142a3661bdf64629e6f05e96191..a626122769f656cf6627d104d00f4fa3a368e7db 100644 --- a/crates/agent_ui/src/inline_prompt_editor.rs +++ b/crates/agent_ui/src/inline_prompt_editor.rs @@ -75,7 +75,7 @@ impl Render for PromptEditor { let codegen = codegen.read(cx); if codegen.alternative_count(cx) > 1 { - buttons.push(self.render_cycle_controls(&codegen, cx)); + buttons.push(self.render_cycle_controls(codegen, cx)); } let editor_margins = editor_margins.lock(); @@ -345,7 +345,7 @@ impl PromptEditor { let prompt = self.editor.read(cx).text(cx); if self .prompt_history_ix - .map_or(true, |ix| self.prompt_history[ix] != prompt) + .is_none_or(|ix| self.prompt_history[ix] != prompt) { self.prompt_history_ix.take(); self.pending_prompt = prompt; @@ -541,7 +541,7 @@ impl PromptEditor { match &self.mode { PromptEditorMode::Terminal { .. } => vec![ accept, - IconButton::new("confirm", IconName::PlayOutlined) + IconButton::new("confirm", IconName::PlayFilled) .icon_color(Color::Info) .shape(IconButtonShape::Square) .tooltip(|window, cx| { @@ -1229,27 +1229,27 @@ pub enum GenerationMode { impl GenerationMode { fn start_label(self) -> &'static str { match self { - GenerationMode::Generate { .. } => "Generate", + GenerationMode::Generate => "Generate", GenerationMode::Transform => "Transform", } } fn tooltip_interrupt(self) -> &'static str { match self { - GenerationMode::Generate { .. } => "Interrupt Generation", + GenerationMode::Generate => "Interrupt Generation", GenerationMode::Transform => "Interrupt Transform", } } fn tooltip_restart(self) -> &'static str { match self { - GenerationMode::Generate { .. } => "Restart Generation", + GenerationMode::Generate => "Restart Generation", GenerationMode::Transform => "Restart Transform", } } fn tooltip_accept(self) -> &'static str { match self { - GenerationMode::Generate { .. 
} => "Accept Generation", + GenerationMode::Generate => "Accept Generation", GenerationMode::Transform => "Accept Transform", } } diff --git a/crates/agent_ui/src/language_model_selector.rs b/crates/agent_ui/src/language_model_selector.rs index 7121624c87f6e44ba73f8380bfdf60227cba5b90..3633e533da97b2b80e5c8d62c271da7121d3582b 100644 --- a/crates/agent_ui/src/language_model_selector.rs +++ b/crates/agent_ui/src/language_model_selector.rs @@ -1,5 +1,6 @@ use std::{cmp::Reverse, sync::Arc}; +use cloud_llm_client::Plan; use collections::{HashSet, IndexMap}; use feature_flags::ZedProFeatureFlag; use fuzzy::{StringMatch, StringMatchCandidate, match_strings}; @@ -10,7 +11,6 @@ use language_model::{ }; use ordered_float::OrderedFloat; use picker::{Picker, PickerDelegate}; -use proto::Plan; use ui::{ListItem, ListItemSpacing, prelude::*}; const TRY_ZED_PRO_URL: &str = "https://zed.dev/pro"; @@ -93,7 +93,7 @@ impl LanguageModelPickerDelegate { let entries = models.entries(); Self { - on_model_changed: on_model_changed.clone(), + on_model_changed, all_models: Arc::new(models), selected_index: Self::get_active_model_index(&entries, get_active_model(cx)), filtered_entries: entries, @@ -104,7 +104,7 @@ impl LanguageModelPickerDelegate { window, |picker, _, event, window, cx| { match event { - language_model::Event::ProviderStateChanged + language_model::Event::ProviderStateChanged(_) | language_model::Event::AddedProvider(_) | language_model::Event::RemovedProvider(_) => { let query = picker.query(cx); @@ -296,7 +296,7 @@ impl ModelMatcher { pub fn fuzzy_search(&self, query: &str) -> Vec { let mut matches = self.bg_executor.block(match_strings( &self.candidates, - &query, + query, false, true, 100, @@ -514,7 +514,7 @@ impl PickerDelegate for LanguageModelPickerDelegate { .pl_0p5() .gap_1p5() .w(px(240.)) - .child(Label::new(model_info.model.name().0.clone()).truncate()), + .child(Label::new(model_info.model.name().0).truncate()), ) .end_slot(div().pr_3().when(is_selected, |this| { this.child( @@ -536,7 +536,7 @@ impl PickerDelegate for LanguageModelPickerDelegate { ) -> Option { use feature_flags::FeatureFlagAppExt; - let plan = proto::Plan::ZedPro; + let plan = Plan::ZedPro; Some( h_flex() @@ -557,7 +557,7 @@ impl PickerDelegate for LanguageModelPickerDelegate { window .dispatch_action(Box::new(zed_actions::OpenAccountSettings), cx) }), - Plan::Free | Plan::ZedProTrial => Button::new( + Plan::ZedFree | Plan::ZedProTrial => Button::new( "try-pro", if plan == Plan::ZedProTrial { "Upgrade to Pro" diff --git a/crates/agent_ui/src/message_editor.rs b/crates/agent_ui/src/message_editor.rs index 2185885347478269eb260668aaf3cfaaf2c365c9..bed10e90a7315f69f7e89d749d94767276fa1a22 100644 --- a/crates/agent_ui/src/message_editor.rs +++ b/crates/agent_ui/src/message_editor.rs @@ -6,7 +6,7 @@ use crate::agent_diff::AgentDiffThread; use crate::agent_model_selector::AgentModelSelector; use crate::tool_compatibility::{IncompatibleToolsState, IncompatibleToolsTooltip}; use crate::ui::{ - MaxModeTooltip, + BurnModeTooltip, preview::{AgentPreview, UsageCallout}, }; use agent::history_store::HistoryStore; @@ -14,7 +14,7 @@ use agent::{ context::{AgentContextKey, ContextLoadResult, load_context}, context_store::ContextStoreEvent, }; -use agent_settings::{AgentSettings, CompletionMode}; +use agent_settings::{AgentProfileId, AgentSettings, CompletionMode}; use ai_onboarding::ApiKeysWithProviders; use buffer_diff::BufferDiff; use cloud_llm_client::CompletionIntent; @@ -55,7 +55,7 @@ use zed_actions::agent::ToggleModelSelector; 
use crate::context_picker::{ContextPicker, ContextPickerCompletionProvider, crease_for_mention}; use crate::context_strip::{ContextStrip, ContextStripEvent, SuggestContextKind}; -use crate::profile_selector::ProfileSelector; +use crate::profile_selector::{ProfileProvider, ProfileSelector}; use crate::{ ActiveThread, AgentDiffPane, ChatWithFollow, ExpandMessageEditor, Follow, KeepAll, ModelUsageContext, NewThread, OpenAgentDiff, RejectAll, RemoveAllContext, ToggleBurnMode, @@ -117,7 +117,7 @@ pub(crate) fn create_editor( let mut editor = Editor::new( editor::EditorMode::AutoHeight { min_lines, - max_lines: max_lines, + max_lines, }, buffer, None, @@ -152,6 +152,24 @@ pub(crate) fn create_editor( editor } +impl ProfileProvider for Entity { + fn profiles_supported(&self, cx: &App) -> bool { + self.read(cx) + .configured_model() + .is_some_and(|model| model.model.supports_tools()) + } + + fn profile_id(&self, cx: &App) -> AgentProfileId { + self.read(cx).profile().id().clone() + } + + fn set_profile(&self, profile_id: AgentProfileId, cx: &mut App) { + self.update(cx, |this, cx| { + this.set_profile(profile_id, cx); + }); + } +} + impl MessageEditor { pub fn new( fs: Arc, @@ -197,9 +215,10 @@ impl MessageEditor { let subscriptions = vec![ cx.subscribe_in(&context_strip, window, Self::handle_context_strip_event), - cx.subscribe(&editor, |this, _, event, cx| match event { - EditorEvent::BufferEdited => this.handle_message_changed(cx), - _ => {} + cx.subscribe(&editor, |this, _, event: &EditorEvent, cx| { + if event == &EditorEvent::BufferEdited { + this.handle_message_changed(cx) + } }), cx.observe(&context_store, |this, _, cx| { // When context changes, reload it for token counting. @@ -221,14 +240,15 @@ impl MessageEditor { ) }); - let profile_selector = - cx.new(|cx| ProfileSelector::new(fs, thread.clone(), editor.focus_handle(cx), cx)); + let profile_selector = cx.new(|cx| { + ProfileSelector::new(fs, Arc::new(thread.clone()), editor.focus_handle(cx), cx) + }); Self { editor: editor.clone(), project: thread.read(cx).project().clone(), thread, - incompatible_tools_state: incompatible_tools.clone(), + incompatible_tools_state: incompatible_tools, workspace, context_store, prompt_store, @@ -422,11 +442,11 @@ impl MessageEditor { thread.cancel_editing(cx); }); - let cancelled = self.thread.update(cx, |thread, cx| { + let canceled = self.thread.update(cx, |thread, cx| { thread.cancel_last_completion(Some(window.window_handle()), cx) }); - if cancelled { + if canceled { self.set_editor_is_expanded(false, cx); self.send_to_model(window, cx); } @@ -605,7 +625,7 @@ impl MessageEditor { this.toggle_burn_mode(&ToggleBurnMode, window, cx); })) .tooltip(move |_window, cx| { - cx.new(|_| MaxModeTooltip::new().selected(burn_mode_enabled)) + cx.new(|_| BurnModeTooltip::new().selected(burn_mode_enabled)) .into() }) .into_any_element(), @@ -671,11 +691,7 @@ impl MessageEditor { .as_ref() .map(|model| { self.incompatible_tools_state.update(cx, |state, cx| { - state - .incompatible_tools(&model.model, cx) - .iter() - .cloned() - .collect::>() + state.incompatible_tools(&model.model, cx).to_vec() }) }) .unwrap_or_default(); @@ -725,7 +741,7 @@ impl MessageEditor { .when(focus_handle.is_focused(window), |this| { this.child( IconButton::new("toggle-height", expand_icon) - .icon_size(IconSize::XSmall) + .icon_size(IconSize::Small) .icon_color(Color::Muted) .tooltip({ let focus_handle = focus_handle.clone(); @@ -823,7 +839,6 @@ impl MessageEditor { .child(self.profile_selector.clone()) 
.child(self.model_selector.clone()) .map({ - let focus_handle = focus_handle.clone(); move |parent| { if is_generating { parent @@ -831,7 +846,7 @@ impl MessageEditor { parent.child( IconButton::new( "stop-generation", - IconName::StopFilled, + IconName::Stop, ) .icon_color(Color::Error) .style(ButtonStyle::Tinted( @@ -1117,7 +1132,7 @@ impl MessageEditor { ) .when(is_edit_changes_expanded, |parent| { parent.child( - v_flex().children(changed_buffers.into_iter().enumerate().flat_map( + v_flex().children(changed_buffers.iter().enumerate().flat_map( |(index, (buffer, _diff))| { let file = buffer.read(cx).file()?; let path = file.path(); @@ -1147,7 +1162,7 @@ impl MessageEditor { .buffer_font(cx) }); - let file_icon = FileIcons::get_icon(&path, cx) + let file_icon = FileIcons::get_icon(path, cx) .map(Icon::from_path) .map(|icon| icon.color(Color::Muted).size(IconSize::Small)) .unwrap_or_else(|| { @@ -1274,7 +1289,7 @@ impl MessageEditor { self.thread .read(cx) .configured_model() - .map_or(false, |model| model.provider.id() == ZED_CLOUD_PROVIDER_ID) + .is_some_and(|model| model.provider.id() == ZED_CLOUD_PROVIDER_ID) } fn render_usage_callout(&self, line_height: Pixels, cx: &mut Context) -> Option
{ @@ -1304,14 +1319,10 @@ impl MessageEditor { token_usage_ratio: TokenUsageRatio, cx: &mut Context<Self>, ) -> Option<Div>
{ - let icon = if token_usage_ratio == TokenUsageRatio::Exceeded { - Icon::new(IconName::X) - .color(Color::Error) - .size(IconSize::XSmall) + let (icon, severity) = if token_usage_ratio == TokenUsageRatio::Exceeded { + (IconName::Close, Severity::Error) } else { - Icon::new(IconName::Warning) - .color(Color::Warning) - .size(IconSize::XSmall) + (IconName::Warning, Severity::Warning) }; let title = if token_usage_ratio == TokenUsageRatio::Exceeded { @@ -1326,29 +1337,33 @@ impl MessageEditor { "To continue, start a new thread from a summary." }; - let mut callout = Callout::new() + let callout = Callout::new() .line_height(line_height) + .severity(severity) .icon(icon) .title(title) .description(description) - .primary_action( - Button::new("start-new-thread", "Start New Thread") - .label_size(LabelSize::Small) - .on_click(cx.listener(|this, _, window, cx| { - let from_thread_id = Some(this.thread.read(cx).id().clone()); - window.dispatch_action(Box::new(NewThread { from_thread_id }), cx); - })), - ); - - if self.is_using_zed_provider(cx) { - callout = callout.secondary_action( - IconButton::new("burn-mode-callout", IconName::ZedBurnMode) - .icon_size(IconSize::XSmall) - .on_click(cx.listener(|this, _event, window, cx| { - this.toggle_burn_mode(&ToggleBurnMode, window, cx); - })), + .actions_slot( + h_flex() + .gap_0p5() + .when(self.is_using_zed_provider(cx), |this| { + this.child( + IconButton::new("burn-mode-callout", IconName::ZedBurnMode) + .icon_size(IconSize::XSmall) + .on_click(cx.listener(|this, _event, window, cx| { + this.toggle_burn_mode(&ToggleBurnMode, window, cx); + })), + ) + }) + .child( + Button::new("start-new-thread", "Start New Thread") + .label_size(LabelSize::Small) + .on_click(cx.listener(|this, _, window, cx| { + let from_thread_id = Some(this.thread.read(cx).id().clone()); + window.dispatch_action(Box::new(NewThread { from_thread_id }), cx); + })), + ), ); - } Some( div() @@ -1385,7 +1400,7 @@ impl MessageEditor { }) .ok(); }); - // Replace existing load task, if any, causing it to be cancelled. + // Replace existing load task, if any, causing it to be canceled. let load_task = load_task.shared(); self.load_context_task = Some(load_task.clone()); cx.spawn(async move |this, cx| { @@ -1427,7 +1442,7 @@ impl MessageEditor { let message_text = editor.read(cx).text(cx); if message_text.is_empty() - && loaded_context.map_or(true, |loaded_context| loaded_context.is_empty()) + && loaded_context.is_none_or(|loaded_context| loaded_context.is_empty()) { return None; } @@ -1540,9 +1555,8 @@ impl ContextCreasesAddon { cx: &mut Context, ) { self.creases.entry(key).or_default().extend(creases); - self._subscription = Some(cx.subscribe( - &context_store, - |editor, _, event, cx| match event { + self._subscription = Some( + cx.subscribe(context_store, |editor, _, event, cx| match event { ContextStoreEvent::ContextRemoved(key) => { let Some(this) = editor.addon_mut::() else { return; @@ -1562,8 +1576,8 @@ impl ContextCreasesAddon { editor.edit(ranges.into_iter().zip(replacement_texts), cx); cx.notify(); } - }, - )) + }), + ) } pub fn into_inner(self) -> HashMap> { @@ -1591,7 +1605,8 @@ pub fn extract_message_creases( .collect::>(); // Filter the addon's list of creases based on what the editor reports, // since the addon might have removed creases in it. 
- let creases = editor.display_map.update(cx, |display_map, cx| { + + editor.display_map.update(cx, |display_map, cx| { display_map .snapshot(cx) .crease_snapshot @@ -1615,8 +1630,7 @@ pub fn extract_message_creases( } }) .collect() - }); - creases + }) } impl EventEmitter for MessageEditor {} @@ -1668,7 +1682,7 @@ impl Render for MessageEditor { let has_history = self .history_store .as_ref() - .and_then(|hs| hs.update(cx, |hs, cx| hs.entries(cx).len() > 0).ok()) + .and_then(|hs| hs.update(cx, |hs, cx| !hs.entries(cx).is_empty()).ok()) .unwrap_or(false) || self .thread @@ -1681,7 +1695,7 @@ impl Render for MessageEditor { !has_history && is_signed_out && has_configured_providers, |this| this.child(cx.new(ApiKeysWithProviders::new)), ) - .when(changed_buffers.len() > 0, |parent| { + .when(!changed_buffers.is_empty(), |parent| { parent.child(self.render_edits_bar(&changed_buffers, window, cx)) }) .child(self.render_editor(window, cx)) @@ -1786,7 +1800,7 @@ impl AgentPreview for MessageEditor { .bg(cx.theme().colors().panel_background) .border_1() .border_color(cx.theme().colors().border) - .child(default_message_editor.clone()) + .child(default_message_editor) .into_any_element(), )]) .into_any_element(), diff --git a/crates/agent_ui/src/profile_selector.rs b/crates/agent_ui/src/profile_selector.rs index ddcb44d46b800f257314a8802ad01abc98560ce0..f0f53b96b24c1d4f97fe94ecf155ebb7b73c6fa9 100644 --- a/crates/agent_ui/src/profile_selector.rs +++ b/crates/agent_ui/src/profile_selector.rs @@ -1,12 +1,8 @@ use crate::{ManageProfiles, ToggleProfileSelector}; -use agent::{ - Thread, - agent_profile::{AgentProfile, AvailableProfiles}, -}; +use agent::agent_profile::{AgentProfile, AvailableProfiles}; use agent_settings::{AgentDockPosition, AgentProfileId, AgentSettings, builtin_profiles}; use fs::Fs; -use gpui::{Action, Empty, Entity, FocusHandle, Subscription, prelude::*}; -use language_model::LanguageModelRegistry; +use gpui::{Action, Entity, FocusHandle, Subscription, prelude::*}; use settings::{Settings as _, SettingsStore, update_settings_file}; use std::sync::Arc; use ui::{ @@ -14,10 +10,22 @@ use ui::{ prelude::*, }; +/// Trait for types that can provide and manage agent profiles +pub trait ProfileProvider { + /// Get the current profile ID + fn profile_id(&self, cx: &App) -> AgentProfileId; + + /// Set the profile ID + fn set_profile(&self, profile_id: AgentProfileId, cx: &mut App); + + /// Check if profiles are supported in the current context (e.g. 
if the model that is selected has tool support) + fn profiles_supported(&self, cx: &App) -> bool; +} + pub struct ProfileSelector { profiles: AvailableProfiles, fs: Arc, - thread: Entity, + provider: Arc, menu_handle: PopoverMenuHandle, focus_handle: FocusHandle, _subscriptions: Vec, @@ -26,7 +34,7 @@ pub struct ProfileSelector { impl ProfileSelector { pub fn new( fs: Arc, - thread: Entity, + provider: Arc, focus_handle: FocusHandle, cx: &mut Context, ) -> Self { @@ -37,7 +45,7 @@ impl ProfileSelector { Self { profiles: AgentProfile::available_profiles(cx), fs, - thread, + provider, menu_handle: PopoverMenuHandle::default(), focus_handle, _subscriptions: vec![settings_subscription], @@ -113,10 +121,10 @@ impl ProfileSelector { builtin_profiles::MINIMAL => Some("Chat about anything with no tools."), _ => None, }; - let thread_profile_id = self.thread.read(cx).profile().id(); + let thread_profile_id = self.provider.profile_id(cx); let entry = ContextMenuEntry::new(profile_name.clone()) - .toggleable(IconPosition::End, &profile_id == thread_profile_id); + .toggleable(IconPosition::End, profile_id == thread_profile_id); let entry = if let Some(doc_text) = documentation { entry.documentation_aside(documentation_side(settings.dock), move |_| { @@ -128,19 +136,16 @@ impl ProfileSelector { entry.handler({ let fs = self.fs.clone(); - let thread = self.thread.clone(); - let profile_id = profile_id.clone(); + let provider = self.provider.clone(); move |_window, cx| { update_settings_file::(fs.clone(), cx, { let profile_id = profile_id.clone(); move |settings, _cx| { - settings.set_profile(profile_id.clone()); + settings.set_profile(profile_id); } }); - thread.update(cx, |this, cx| { - this.set_profile(profile_id.clone(), cx); - }); + provider.set_profile(profile_id.clone(), cx); } }) } @@ -149,23 +154,15 @@ impl ProfileSelector { impl Render for ProfileSelector { fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { let settings = AgentSettings::get_global(cx); - let profile_id = self.thread.read(cx).profile().id(); - let profile = settings.profiles.get(profile_id); + let profile_id = self.provider.profile_id(cx); + let profile = settings.profiles.get(&profile_id); let selected_profile = profile .map(|profile| profile.name.clone()) .unwrap_or_else(|| "Unknown".into()); - let configured_model = self.thread.read(cx).configured_model().or_else(|| { - let model_registry = LanguageModelRegistry::read_global(cx); - model_registry.default_model() - }); - let Some(configured_model) = configured_model else { - return Empty.into_any_element(); - }; - - if configured_model.model.supports_tools() { - let this = cx.entity().clone(); + if self.provider.profiles_supported(cx) { + let this = cx.entity(); let focus_handle = self.focus_handle.clone(); let trigger_button = Button::new("profile-selector-model", selected_profile) .label_size(LabelSize::Small) @@ -177,7 +174,6 @@ impl Render for ProfileSelector { PopoverMenu::new("profile-selector") .trigger_with_tooltip(trigger_button, { - let focus_handle = focus_handle.clone(); move |window, cx| { Tooltip::for_action_in( "Toggle Profile Menu", diff --git a/crates/agent_ui/src/slash_command.rs b/crates/agent_ui/src/slash_command.rs index 6b37c5a2d7d6aaf2c9878efb90a22d11ddac2419..87e5d45fe85218c35316cb0a043caadf8837a2ea 100644 --- a/crates/agent_ui/src/slash_command.rs +++ b/crates/agent_ui/src/slash_command.rs @@ -88,8 +88,6 @@ impl SlashCommandCompletionProvider { .map(|(editor, workspace)| { let command_name = mat.string.clone(); let 
command_range = command_range.clone(); - let editor = editor.clone(); - let workspace = workspace.clone(); Arc::new( move |intent: CompletionIntent, window: &mut Window, @@ -158,7 +156,7 @@ impl SlashCommandCompletionProvider { if let Some(command) = self.slash_commands.command(command_name, cx) { let completions = command.complete_argument( arguments, - new_cancel_flag.clone(), + new_cancel_flag, self.workspace.clone(), window, cx, diff --git a/crates/agent_ui/src/slash_command_picker.rs b/crates/agent_ui/src/slash_command_picker.rs index a757a2f50a1dc03fdd3af6dec341e6c837fa8aac..a6bb61510cbeb557e22018c73082bba17d177d7e 100644 --- a/crates/agent_ui/src/slash_command_picker.rs +++ b/crates/agent_ui/src/slash_command_picker.rs @@ -140,12 +140,10 @@ impl PickerDelegate for SlashCommandDelegate { ); ret.push(index - 1); } - } else { - if let SlashCommandEntry::Advert { .. } = command { - previous_is_advert = true; - if index != 0 { - ret.push(index - 1); - } + } else if let SlashCommandEntry::Advert { .. } = command { + previous_is_advert = true; + if index != 0 { + ret.push(index - 1); } } } @@ -214,7 +212,7 @@ impl PickerDelegate for SlashCommandDelegate { let mut label = format!("{}", info.name); if let Some(args) = info.args.as_ref().filter(|_| selected) { - label.push_str(&args); + label.push_str(args); } Label::new(label) .single_line() @@ -306,7 +304,7 @@ where ) .child( Icon::new(IconName::ArrowUpRight) - .size(IconSize::XSmall) + .size(IconSize::Small) .color(Color::Muted), ), ) @@ -329,9 +327,7 @@ where }; let picker_view = cx.new(|cx| { - let picker = - Picker::uniform_list(delegate, window, cx).max_height(Some(rems(20.).into())); - picker + Picker::uniform_list(delegate, window, cx).max_height(Some(rems(20.).into())) }); let handle = self diff --git a/crates/agent_ui/src/slash_command_settings.rs b/crates/agent_ui/src/slash_command_settings.rs index f254d00ec60b08197237d0f179bc755c16ed7d40..73e5622aa921ccf03a3813717446e830c21079b8 100644 --- a/crates/agent_ui/src/slash_command_settings.rs +++ b/crates/agent_ui/src/slash_command_settings.rs @@ -7,22 +7,11 @@ use settings::{Settings, SettingsSources}; /// Settings for slash commands. #[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)] pub struct SlashCommandSettings { - /// Settings for the `/docs` slash command. - #[serde(default)] - pub docs: DocsCommandSettings, /// Settings for the `/cargo-workspace` slash command. #[serde(default)] pub cargo_workspace: CargoWorkspaceCommandSettings, } -/// Settings for the `/docs` slash command. -#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)] -pub struct DocsCommandSettings { - /// Whether `/docs` is enabled. - #[serde(default)] - pub enabled: bool, -} - /// Settings for the `/cargo-workspace` slash command. 
#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)] pub struct CargoWorkspaceCommandSettings { diff --git a/crates/agent_ui/src/terminal_codegen.rs b/crates/agent_ui/src/terminal_codegen.rs index 54f5b52f584cb87fd2953a148aa8ae48ea38b862..5a4a9d560a16e858dcaedf706f2067a24bc12c5f 100644 --- a/crates/agent_ui/src/terminal_codegen.rs +++ b/crates/agent_ui/src/terminal_codegen.rs @@ -48,7 +48,7 @@ impl TerminalCodegen { let prompt = prompt_task.await; let model_telemetry_id = model.telemetry_id(); let model_provider_id = model.provider_id(); - let response = model.stream_completion_text(prompt, &cx).await; + let response = model.stream_completion_text(prompt, cx).await; let generate = async { let message_id = response .as_ref() diff --git a/crates/agent_ui/src/terminal_inline_assistant.rs b/crates/agent_ui/src/terminal_inline_assistant.rs index bcbc308c99da7b80e716fce9e60461352dcb814c..e7070c0d7fc4878c1f73a6d5f874607422ae53d6 100644 --- a/crates/agent_ui/src/terminal_inline_assistant.rs +++ b/crates/agent_ui/src/terminal_inline_assistant.rs @@ -388,20 +388,20 @@ impl TerminalInlineAssistant { window: &mut Window, cx: &mut App, ) { - if let Some(assist) = self.assists.get_mut(&assist_id) { - if let Some(prompt_editor) = assist.prompt_editor.as_ref().cloned() { - assist - .terminal - .update(cx, |terminal, cx| { - terminal.clear_block_below_cursor(cx); - let block = terminal_view::BlockProperties { - height, - render: Box::new(move |_| prompt_editor.clone().into_any_element()), - }; - terminal.set_block_below_cursor(block, window, cx); - }) - .log_err(); - } + if let Some(assist) = self.assists.get_mut(&assist_id) + && let Some(prompt_editor) = assist.prompt_editor.as_ref().cloned() + { + assist + .terminal + .update(cx, |terminal, cx| { + terminal.clear_block_below_cursor(cx); + let block = terminal_view::BlockProperties { + height, + render: Box::new(move |_| prompt_editor.clone().into_any_element()), + }; + terminal.set_block_below_cursor(block, window, cx); + }) + .log_err(); } } } @@ -432,7 +432,7 @@ impl TerminalInlineAssist { terminal: terminal.downgrade(), prompt_editor: Some(prompt_editor.clone()), codegen: codegen.clone(), - workspace: workspace.clone(), + workspace, context_store, prompt_store, _subscriptions: vec![ @@ -450,23 +450,20 @@ impl TerminalInlineAssist { return; }; - if let CodegenStatus::Error(error) = &codegen.read(cx).status { - if assist.prompt_editor.is_none() { - if let Some(workspace) = assist.workspace.upgrade() { - let error = - format!("Terminal inline assistant error: {}", error); - workspace.update(cx, |workspace, cx| { - struct InlineAssistantError; - - let id = - NotificationId::composite::( - assist_id.0, - ); - - workspace.show_toast(Toast::new(id, error), cx); - }) - } - } + if let CodegenStatus::Error(error) = &codegen.read(cx).status + && assist.prompt_editor.is_none() + && let Some(workspace) = assist.workspace.upgrade() + { + let error = format!("Terminal inline assistant error: {}", error); + workspace.update(cx, |workspace, cx| { + struct InlineAssistantError; + + let id = NotificationId::composite::( + assist_id.0, + ); + + workspace.show_toast(Toast::new(id, error), cx); + }) } if assist.prompt_editor.is_none() { diff --git a/crates/agent_ui/src/text_thread_editor.rs b/crates/agent_ui/src/text_thread_editor.rs index 4836a95c8efd1dafcd7654802e55c54694280a87..9fbd90c4a69694c7d76f39234aef7da3b105c22c 100644 --- a/crates/agent_ui/src/text_thread_editor.rs +++ b/crates/agent_ui/src/text_thread_editor.rs @@ -1,14 +1,12 @@ use crate::{ - 
burn_mode_tooltip::BurnModeTooltip, + QuoteSelection, language_model_selector::{LanguageModelSelector, language_model_selector}, + ui::BurnModeTooltip, }; use agent_settings::{AgentSettings, CompletionMode}; use anyhow::Result; use assistant_slash_command::{SlashCommand, SlashCommandOutputSection, SlashCommandWorkingSet}; -use assistant_slash_commands::{ - DefaultSlashCommand, DocsSlashCommand, DocsSlashCommandArgs, FileSlashCommand, - selections_creases, -}; +use assistant_slash_commands::{DefaultSlashCommand, FileSlashCommand, selections_creases}; use client::{proto, zed_urls}; use collections::{BTreeSet, HashMap, HashSet, hash_map}; use editor::{ @@ -30,7 +28,6 @@ use gpui::{ StatefulInteractiveElement, Styled, Subscription, Task, Transformation, WeakEntity, actions, div, img, percentage, point, prelude::*, pulsating_between, size, }; -use indexed_docs::IndexedDocsStore; use language::{ BufferSnapshot, LspAdapterDelegate, ToOffset, language_settings::{SoftWrap, all_language_settings}, @@ -77,7 +74,7 @@ use crate::{slash_command::SlashCommandCompletionProvider, slash_command_picker} use assistant_context::{ AssistantContext, CacheStatus, Content, ContextEvent, ContextId, InvokedSlashCommandId, InvokedSlashCommandStatus, Message, MessageId, MessageMetadata, MessageStatus, - ParsedSlashCommand, PendingSlashCommandStatus, ThoughtProcessOutputSection, + PendingSlashCommandStatus, ThoughtProcessOutputSection, }; actions!( @@ -93,8 +90,6 @@ actions!( CycleMessageRole, /// Inserts the selected text into the active editor. InsertIntoEditor, - /// Quotes the current selection in the assistant conversation. - QuoteSelection, /// Splits the conversation at the current cursor position. Split, ] @@ -377,7 +372,7 @@ impl TextThreadEditor { .map(|default| default.provider); if provider .as_ref() - .map_or(false, |provider| provider.must_accept_terms(cx)) + .is_some_and(|provider| provider.must_accept_terms(cx)) { self.show_accept_terms = true; cx.notify(); @@ -461,7 +456,7 @@ impl TextThreadEditor { || snapshot .chars_at(newest_cursor) .next() - .map_or(false, |ch| ch != '\n') + .is_some_and(|ch| ch != '\n') { editor.move_to_end_of_line( &MoveToEndOfLine { @@ -544,7 +539,7 @@ impl TextThreadEditor { let context = self.context.read(cx); let sections = context .slash_command_output_sections() - .into_iter() + .iter() .filter(|section| section.is_valid(context.buffer().read(cx))) .cloned() .collect::>(); @@ -701,19 +696,7 @@ impl TextThreadEditor { } }; let render_trailer = { - let command = command.clone(); - move |row, _unfold, _window: &mut Window, cx: &mut App| { - // TODO: In the future we should investigate how we can expose - // this as a hook on the `SlashCommand` trait so that we don't - // need to special-case it here. 
- if command.name == DocsSlashCommand::NAME { - return render_docs_slash_command_trailer( - row, - command.clone(), - cx, - ); - } - + move |_row, _unfold, _window: &mut Window, _cx: &mut App| { Empty.into_any() } }; @@ -761,32 +744,27 @@ impl TextThreadEditor { ) { if let Some(invoked_slash_command) = self.context.read(cx).invoked_slash_command(&command_id) + && let InvokedSlashCommandStatus::Finished = invoked_slash_command.status { - if let InvokedSlashCommandStatus::Finished = invoked_slash_command.status { - let run_commands_in_ranges = invoked_slash_command - .run_commands_in_ranges - .iter() - .cloned() - .collect::>(); - for range in run_commands_in_ranges { - let commands = self.context.update(cx, |context, cx| { - context.reparse(cx); - context - .pending_commands_for_range(range.clone(), cx) - .to_vec() - }); + let run_commands_in_ranges = invoked_slash_command.run_commands_in_ranges.clone(); + for range in run_commands_in_ranges { + let commands = self.context.update(cx, |context, cx| { + context.reparse(cx); + context + .pending_commands_for_range(range.clone(), cx) + .to_vec() + }); - for command in commands { - self.run_command( - command.source_range, - &command.name, - &command.arguments, - false, - self.workspace.clone(), - window, - cx, - ); - } + for command in commands { + self.run_command( + command.source_range, + &command.name, + &command.arguments, + false, + self.workspace.clone(), + window, + cx, + ); } } } @@ -1258,7 +1236,7 @@ impl TextThreadEditor { let mut new_blocks = vec![]; let mut block_index_to_message = vec![]; for message in self.context.read(cx).messages(cx) { - if let Some(_) = blocks_to_remove.remove(&message.id) { + if blocks_to_remove.remove(&message.id).is_some() { // This is an old message that we might modify. let Some((meta, block_id)) = old_blocks.get_mut(&message.id) else { debug_assert!( @@ -1296,7 +1274,7 @@ impl TextThreadEditor { context_editor_view: &Entity, cx: &mut Context, ) -> Option<(String, bool)> { - const CODE_FENCE_DELIMITER: &'static str = "```"; + const CODE_FENCE_DELIMITER: &str = "```"; let context_editor = context_editor_view.read(cx).editor.clone(); context_editor.update(cx, |context_editor, cx| { @@ -1760,7 +1738,7 @@ impl TextThreadEditor { render_slash_command_output_toggle, |_, _, _, _| Empty.into_any(), ) - .with_metadata(metadata.crease.clone()) + .with_metadata(metadata.crease) }), cx, ); @@ -1831,7 +1809,7 @@ impl TextThreadEditor { .filter_map(|(anchor, render_image)| { const MAX_HEIGHT_IN_LINES: u32 = 8; let anchor = buffer.anchor_in_excerpt(excerpt_id, anchor).unwrap(); - let image = render_image.clone(); + let image = render_image; anchor.is_valid(&buffer).then(|| BlockProperties { placement: BlockPlacement::Above(anchor), height: Some(MAX_HEIGHT_IN_LINES), @@ -1894,7 +1872,7 @@ impl TextThreadEditor { } fn render_send_button(&self, window: &mut Window, cx: &mut Context) -> impl IntoElement { - let focus_handle = self.focus_handle(cx).clone(); + let focus_handle = self.focus_handle(cx); let (style, tooltip) = match token_state(&self.context, cx) { Some(TokenState::NoTokensLeft { .. }) => ( @@ -2036,7 +2014,7 @@ impl TextThreadEditor { None => IconName::Ai, }; - let focus_handle = self.editor().focus_handle(cx).clone(); + let focus_handle = self.editor().focus_handle(cx); PickerPopoverMenu::new( self.language_model_selector.clone(), @@ -2182,8 +2160,8 @@ impl TextThreadEditor { /// Returns the contents of the *outermost* fenced code block that contains the given offset. 
fn find_surrounding_code_block(snapshot: &BufferSnapshot, offset: usize) -> Option> { - const CODE_BLOCK_NODE: &'static str = "fenced_code_block"; - const CODE_BLOCK_CONTENT: &'static str = "code_fence_content"; + const CODE_BLOCK_NODE: &str = "fenced_code_block"; + const CODE_BLOCK_CONTENT: &str = "code_fence_content"; let layer = snapshot.syntax_layers().next()?; @@ -2233,7 +2211,7 @@ fn render_thought_process_fold_icon_button( let button = match status { ThoughtProcessStatus::Pending => button .child( - Icon::new(IconName::LightBulb) + Icon::new(IconName::ToolThink) .size(IconSize::Small) .color(Color::Muted), ) @@ -2248,7 +2226,7 @@ fn render_thought_process_fold_icon_button( ), ThoughtProcessStatus::Completed => button .style(ButtonStyle::Filled) - .child(Icon::new(IconName::LightBulb).size(IconSize::Small)) + .child(Icon::new(IconName::ToolThink).size(IconSize::Small)) .child(Label::new("Thought Process").single_line()), }; @@ -2398,70 +2376,6 @@ fn render_pending_slash_command_gutter_decoration( icon.into_any_element() } -fn render_docs_slash_command_trailer( - row: MultiBufferRow, - command: ParsedSlashCommand, - cx: &mut App, -) -> AnyElement { - if command.arguments.is_empty() { - return Empty.into_any(); - } - let args = DocsSlashCommandArgs::parse(&command.arguments); - - let Some(store) = args - .provider() - .and_then(|provider| IndexedDocsStore::try_global(provider, cx).ok()) - else { - return Empty.into_any(); - }; - - let Some(package) = args.package() else { - return Empty.into_any(); - }; - - let mut children = Vec::new(); - - if store.is_indexing(&package) { - children.push( - div() - .id(("crates-being-indexed", row.0)) - .child(Icon::new(IconName::ArrowCircle).with_animation( - "arrow-circle", - Animation::new(Duration::from_secs(4)).repeat(), - |icon, delta| icon.transform(Transformation::rotate(percentage(delta))), - )) - .tooltip({ - let package = package.clone(); - Tooltip::text(format!("Indexing {package}…")) - }) - .into_any_element(), - ); - } - - if let Some(latest_error) = store.latest_error_for_package(&package) { - children.push( - div() - .id(("latest-error", row.0)) - .child( - Icon::new(IconName::Warning) - .size(IconSize::Small) - .color(Color::Warning), - ) - .tooltip(Tooltip::text(format!("Failed to index: {latest_error}"))) - .into_any_element(), - ) - } - - let is_indexing = store.is_indexing(&package); - let latest_error = store.latest_error_for_package(&package); - - if !is_indexing && latest_error.is_none() { - return Empty.into_any(); - } - - h_flex().gap_2().children(children).into_any_element() -} - #[derive(Debug, Clone, Serialize, Deserialize)] struct CopyMetadata { creases: Vec, @@ -3214,7 +3128,7 @@ mod tests { let context_editor = window .update(&mut cx, |_, window, cx| { cx.new(|cx| { - let editor = TextThreadEditor::for_context( + TextThreadEditor::for_context( context.clone(), fs, workspace.downgrade(), @@ -3222,8 +3136,7 @@ mod tests { None, window, cx, - ); - editor + ) }) }) .unwrap(); diff --git a/crates/agent_ui/src/thread_history.rs b/crates/agent_ui/src/thread_history.rs index b8d1db88d6e3164b32ade0f2137ad7ca37a0650a..4ec2078e5db376fbe45c528d9cce2c13e3175ba5 100644 --- a/crates/agent_ui/src/thread_history.rs +++ b/crates/agent_ui/src/thread_history.rs @@ -166,14 +166,13 @@ impl ThreadHistory { this.all_entries.len().saturating_sub(1), cx, ); - } else if let Some(prev_id) = previously_selected_entry { - if let Some(new_ix) = this + } else if let Some(prev_id) = previously_selected_entry + && let Some(new_ix) = this .all_entries 
.iter() .position(|probe| probe.id() == prev_id) - { - this.set_selected_entry_index(new_ix, cx); - } + { + this.set_selected_entry_index(new_ix, cx); } } SearchState::Searching { query, .. } | SearchState::Searched { query, .. } => { @@ -541,6 +540,7 @@ impl Render for ThreadHistory { v_flex() .key_context("ThreadHistory") .size_full() + .bg(cx.theme().colors().panel_background) .on_action(cx.listener(Self::select_previous)) .on_action(cx.listener(Self::select_next)) .on_action(cx.listener(Self::select_first)) diff --git a/crates/agent_ui/src/tool_compatibility.rs b/crates/agent_ui/src/tool_compatibility.rs index d4e1da5bb0a532c8307364582349378d98c51a26..046c0a4abc5e3ac0130b2af5406cfbb3f977b00c 100644 --- a/crates/agent_ui/src/tool_compatibility.rs +++ b/crates/agent_ui/src/tool_compatibility.rs @@ -14,13 +14,11 @@ pub struct IncompatibleToolsState { impl IncompatibleToolsState { pub fn new(thread: Entity, cx: &mut Context) -> Self { - let _tool_working_set_subscription = - cx.subscribe(&thread, |this, _, event, _| match event { - ThreadEvent::ProfileChanged => { - this.cache.clear(); - } - _ => {} - }); + let _tool_working_set_subscription = cx.subscribe(&thread, |this, _, event, _| { + if let ThreadEvent::ProfileChanged = event { + this.cache.clear(); + } + }); Self { cache: HashMap::default(), diff --git a/crates/agent_ui/src/ui.rs b/crates/agent_ui/src/ui.rs index b477a8c385c5f8aee85b54cf5f82cdd49e2e2484..e27a2242404e4f3b1d721b66ac2aad9119c4447a 100644 --- a/crates/agent_ui/src/ui.rs +++ b/crates/agent_ui/src/ui.rs @@ -2,7 +2,6 @@ mod agent_notification; mod burn_mode_tooltip; mod context_pill; mod end_trial_upsell; -mod new_thread_button; mod onboarding_modal; pub mod preview; @@ -10,5 +9,4 @@ pub use agent_notification::*; pub use burn_mode_tooltip::*; pub use context_pill::*; pub use end_trial_upsell::*; -pub use new_thread_button::*; pub use onboarding_modal::*; diff --git a/crates/agent_ui/src/ui/burn_mode_tooltip.rs b/crates/agent_ui/src/ui/burn_mode_tooltip.rs index 97f7853a61bc2bc2766492e077e34c3f1b534abe..72faaa614d0d531365fef9ba5ff0e62a6fbcf145 100644 --- a/crates/agent_ui/src/ui/burn_mode_tooltip.rs +++ b/crates/agent_ui/src/ui/burn_mode_tooltip.rs @@ -2,11 +2,11 @@ use crate::ToggleBurnMode; use gpui::{Context, FontWeight, IntoElement, Render, Window}; use ui::{KeyBinding, prelude::*, tooltip_container}; -pub struct MaxModeTooltip { +pub struct BurnModeTooltip { selected: bool, } -impl MaxModeTooltip { +impl BurnModeTooltip { pub fn new() -> Self { Self { selected: false } } @@ -17,7 +17,7 @@ impl MaxModeTooltip { } } -impl Render for MaxModeTooltip { +impl Render for BurnModeTooltip { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let (icon, color) = if self.selected { (IconName::ZedBurnModeOn, Color::Error) diff --git a/crates/agent_ui/src/ui/context_pill.rs b/crates/agent_ui/src/ui/context_pill.rs index 5dd57de24490df03ce0f2c41a844be33fb675793..7c7fbd27f0d4ebe3b5c42cc6c5a244ae6add5614 100644 --- a/crates/agent_ui/src/ui/context_pill.rs +++ b/crates/agent_ui/src/ui/context_pill.rs @@ -353,7 +353,7 @@ impl AddedContext { name, parent, tooltip: Some(full_path_string), - icon_path: FileIcons::get_icon(&full_path, cx), + icon_path: FileIcons::get_icon(full_path, cx), status: ContextStatus::Ready, render_hover: None, handle: AgentContextHandle::File(handle), @@ -499,7 +499,7 @@ impl AddedContext { let thread = handle.thread.clone(); Some(Rc::new(move |_, cx| { let text = thread.read(cx).latest_detailed_summary_or_text(); - 
ContextPillHover::new_text(text.clone(), cx).into() + ContextPillHover::new_text(text, cx).into() })) }, handle: AgentContextHandle::Thread(handle), @@ -574,7 +574,7 @@ impl AddedContext { .unwrap_or_else(|| "Unnamed Rule".into()); Some(AddedContext { kind: ContextKind::Rules, - name: title.clone(), + name: title, parent: None, tooltip: None, icon_path: None, @@ -615,7 +615,7 @@ impl AddedContext { let full_path_string: SharedString = full_path.to_string_lossy().into_owned().into(); let (name, parent) = extract_file_name_and_directory_from_full_path(full_path, &full_path_string); - let icon_path = FileIcons::get_icon(&full_path, cx); + let icon_path = FileIcons::get_icon(full_path, cx); (name, parent, icon_path) } else { ("Image".into(), None, None) @@ -706,7 +706,7 @@ impl ContextFileExcerpt { .and_then(|p| p.file_name()) .map(|n| n.to_string_lossy().into_owned().into()); - let icon_path = FileIcons::get_icon(&full_path, cx); + let icon_path = FileIcons::get_icon(full_path, cx); ContextFileExcerpt { file_name_and_range: file_name_and_range.into(), diff --git a/crates/agent_ui/src/ui/new_thread_button.rs b/crates/agent_ui/src/ui/new_thread_button.rs deleted file mode 100644 index 7764144150762f9b828ea98f1c917332759bd5ad..0000000000000000000000000000000000000000 --- a/crates/agent_ui/src/ui/new_thread_button.rs +++ /dev/null @@ -1,75 +0,0 @@ -use gpui::{ClickEvent, ElementId, IntoElement, ParentElement, Styled}; -use ui::prelude::*; - -#[derive(IntoElement)] -pub struct NewThreadButton { - id: ElementId, - label: SharedString, - icon: IconName, - keybinding: Option, - on_click: Option>, -} - -impl NewThreadButton { - pub fn new(id: impl Into, label: impl Into, icon: IconName) -> Self { - Self { - id: id.into(), - label: label.into(), - icon, - keybinding: None, - on_click: None, - } - } - - pub fn keybinding(mut self, keybinding: Option) -> Self { - self.keybinding = keybinding; - self - } - - pub fn on_click(mut self, handler: F) -> Self - where - F: Fn(&mut Window, &mut App) + 'static, - { - self.on_click = Some(Box::new( - move |_: &ClickEvent, window: &mut Window, cx: &mut App| handler(window, cx), - )); - self - } -} - -impl RenderOnce for NewThreadButton { - fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { - h_flex() - .id(self.id) - .w_full() - .py_1p5() - .px_2() - .gap_1() - .justify_between() - .rounded_md() - .border_1() - .border_color(cx.theme().colors().border.opacity(0.4)) - .bg(cx.theme().colors().element_active.opacity(0.2)) - .hover(|style| { - style - .bg(cx.theme().colors().element_hover) - .border_color(cx.theme().colors().border) - }) - .child( - h_flex() - .gap_1p5() - .child( - Icon::new(self.icon) - .size(IconSize::XSmall) - .color(Color::Muted), - ) - .child(Label::new(self.label).size(LabelSize::Small)), - ) - .when_some(self.keybinding, |this, keybinding| { - this.child(keybinding.size(rems_from_px(10.))) - }) - .when_some(self.on_click, |this, on_click| { - this.on_click(move |event, window, cx| on_click(event, window, cx)) - }) - } -} diff --git a/crates/agent_ui/src/ui/onboarding_modal.rs b/crates/agent_ui/src/ui/onboarding_modal.rs index 9e04171ec985c009cd9e0d58314dee017c666eff..b8b038bdfca334ba048aed0a59cecd0f3da285b0 100644 --- a/crates/agent_ui/src/ui/onboarding_modal.rs +++ b/crates/agent_ui/src/ui/onboarding_modal.rs @@ -139,7 +139,7 @@ impl Render for AgentOnboardingModal { .child(Headline::new("Agentic Editing in Zed").size(HeadlineSize::Large)), ) .child(h_flex().absolute().top_2().right_2().child( - IconButton::new("cancel", 
IconName::X).on_click(cx.listener( + IconButton::new("cancel", IconName::Close).on_click(cx.listener( |_, _: &ClickEvent, _window, cx| { agent_onboarding_event!("Cancelled", trigger = "X click"); cx.emit(DismissEvent); diff --git a/crates/agent_ui/src/ui/preview/usage_callouts.rs b/crates/agent_ui/src/ui/preview/usage_callouts.rs index 64869a6ec71cdbe8e3532983c48784136b3dcb36..29b12ea62772fc21b49526ff0fcd06d053dc9c48 100644 --- a/crates/agent_ui/src/ui/preview/usage_callouts.rs +++ b/crates/agent_ui/src/ui/preview/usage_callouts.rs @@ -80,14 +80,10 @@ impl RenderOnce for UsageCallout { } }; - let icon = if is_limit_reached { - Icon::new(IconName::X) - .color(Color::Error) - .size(IconSize::XSmall) + let (icon, severity) = if is_limit_reached { + (IconName::Close, Severity::Error) } else { - Icon::new(IconName::Warning) - .color(Color::Warning) - .size(IconSize::XSmall) + (IconName::Warning, Severity::Warning) }; div() @@ -95,10 +91,12 @@ impl RenderOnce for UsageCallout { .border_color(cx.theme().colors().border) .child( Callout::new() + .icon(icon) + .severity(severity) .icon(icon) .title(title) .description(message) - .primary_action( + .actions_slot( Button::new("upgrade", button_text) .label_size(LabelSize::Small) .on_click(move |_, _, cx| { diff --git a/crates/ai_onboarding/src/agent_api_keys_onboarding.rs b/crates/ai_onboarding/src/agent_api_keys_onboarding.rs index b55ad4c89549a8843fe2d8273da60236400cb565..fadc4222ae44f3dbad862fd9479b89321dbd3016 100644 --- a/crates/ai_onboarding/src/agent_api_keys_onboarding.rs +++ b/crates/ai_onboarding/src/agent_api_keys_onboarding.rs @@ -11,7 +11,7 @@ impl ApiKeysWithProviders { cx.subscribe( &LanguageModelRegistry::global(cx), |this: &mut Self, _registry, event: &language_model::Event, cx| match event { - language_model::Event::ProviderStateChanged + language_model::Event::ProviderStateChanged(_) | language_model::Event::AddedProvider(_) | language_model::Event::RemovedProvider(_) => { this.configured_providers = Self::compute_configured_providers(cx) @@ -33,7 +33,7 @@ impl ApiKeysWithProviders { .filter(|provider| { provider.is_authenticated(cx) && provider.id() != ZED_CLOUD_PROVIDER_ID }) - .map(|provider| (provider.icon(), provider.name().0.clone())) + .map(|provider| (provider.icon(), provider.name().0)) .collect() } } diff --git a/crates/ai_onboarding/src/agent_panel_onboarding_content.rs b/crates/ai_onboarding/src/agent_panel_onboarding_content.rs index f1629eeff81ef51bf2ff823eef0db64c1585a669..77f41d1a734afb4d21154cb536c19b4bd04632b1 100644 --- a/crates/ai_onboarding/src/agent_panel_onboarding_content.rs +++ b/crates/ai_onboarding/src/agent_panel_onboarding_content.rs @@ -25,7 +25,7 @@ impl AgentPanelOnboarding { cx.subscribe( &LanguageModelRegistry::global(cx), |this: &mut Self, _registry, event: &language_model::Event, cx| match event { - language_model::Event::ProviderStateChanged + language_model::Event::ProviderStateChanged(_) | language_model::Event::AddedProvider(_) | language_model::Event::RemovedProvider(_) => { this.configured_providers = Self::compute_available_providers(cx) @@ -50,7 +50,7 @@ impl AgentPanelOnboarding { .filter(|provider| { provider.is_authenticated(cx) && provider.id() != ZED_CLOUD_PROVIDER_ID }) - .map(|provider| (provider.icon(), provider.name().0.clone())) + .map(|provider| (provider.icon(), provider.name().0)) .collect() } } @@ -74,7 +74,7 @@ impl Render for AgentPanelOnboarding { }), ) .map(|this| { - if enrolled_in_trial || is_pro_user || self.configured_providers.len() >= 1 { + if enrolled_in_trial || 
is_pro_user || !self.configured_providers.is_empty() { this } else { this.child(ApiKeysWithoutProviders::new()) diff --git a/crates/ai_onboarding/src/ai_onboarding.rs b/crates/ai_onboarding/src/ai_onboarding.rs index b9a1e49a4acdfb3f4a94b6313d1e6fb3ef969adc..717abebfd1dd59ee277f7ae2343024aaafeb825e 100644 --- a/crates/ai_onboarding/src/ai_onboarding.rs +++ b/crates/ai_onboarding/src/ai_onboarding.rs @@ -110,7 +110,7 @@ impl ZedAiOnboarding { .style(ButtonStyle::Outlined) .icon(IconName::ArrowUpRight) .icon_color(Color::Muted) - .icon_size(IconSize::XSmall) + .icon_size(IconSize::Small) .on_click(move |_, _window, cx| { telemetry::event!("Review Terms of Service Clicked"); cx.open_url(&zed_urls::terms_of_service(cx)) @@ -332,17 +332,25 @@ impl ZedAiOnboarding { .mb_2(), ) .child(plan_definitions.pro_plan(false)) - .child( - Button::new("pro", "Continue with Zed Pro") - .full_width() - .style(ButtonStyle::Outlined) - .on_click({ - let callback = self.continue_with_zed_ai.clone(); - move |_, window, cx| { - telemetry::event!("Banner Dismissed", source = "AI Onboarding"); - callback(window, cx) - } - }), + .when_some( + self.dismiss_onboarding.as_ref(), + |this, dismiss_callback| { + let callback = dismiss_callback.clone(); + this.child( + h_flex().absolute().top_0().right_0().child( + IconButton::new("dismiss_onboarding", IconName::Close) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Dismiss")) + .on_click(move |_, window, cx| { + telemetry::event!( + "Banner Dismissed", + source = "AI Onboarding", + ); + callback(window, cx) + }), + ), + ) + }, ) .into_any_element() } diff --git a/crates/ai_onboarding/src/young_account_banner.rs b/crates/ai_onboarding/src/young_account_banner.rs index 54f563e4aac8ca71fff16199cd6c2e8f81ad5376..ed9a6b3b35fb2e8e3afaa9d9b658539dd3fa6541 100644 --- a/crates/ai_onboarding/src/young_account_banner.rs +++ b/crates/ai_onboarding/src/young_account_banner.rs @@ -17,6 +17,6 @@ impl RenderOnce for YoungAccountBanner { div() .max_w_full() .my_1() - .child(Banner::new().severity(ui::Severity::Warning).child(label)) + .child(Banner::new().severity(Severity::Warning).child(label)) } } diff --git a/crates/askpass/src/askpass.rs b/crates/askpass/src/askpass.rs index f085a2be72d04d7c1d16f855230011639853ddf2..9e84a9fed03c8a620c7cb33cc76ef22c000c3fa6 100644 --- a/crates/askpass/src/askpass.rs +++ b/crates/askpass/src/askpass.rs @@ -177,11 +177,11 @@ impl AskPassSession { _ = askpass_opened_rx.fuse() => { // Note: this await can only resolve after we are dropped. 
askpass_kill_master_rx.await.ok();
- return AskPassResult::CancelledByUser
+ AskPassResult::CancelledByUser
}
_ = futures::FutureExt::fuse(smol::Timer::after(connection_timeout)) => {
- return AskPassResult::Timedout
+ AskPassResult::Timedout
}
}
}
@@ -215,7 +215,7 @@ pub fn main(socket: &str) {
}
#[cfg(target_os = "windows")]
- while buffer.last().map_or(false, |&b| b == b'\n' || b == b'\r') {
+ while buffer.last().is_some_and(|&b| b == b'\n' || b == b'\r') {
buffer.pop();
}
if buffer.last() != Some(&b'\0') {
diff --git a/crates/assets/src/assets.rs b/crates/assets/src/assets.rs
index fad0c58b73c61c692a34f899aff70fb9bd6bed98..5c7e671159a62f40a36ea2ea857fd5a93995bf04 100644
--- a/crates/assets/src/assets.rs
+++ b/crates/assets/src/assets.rs
@@ -58,9 +58,7 @@ impl Assets {
pub fn load_test_fonts(&self, cx: &App) {
cx.text_system()
.add_fonts(vec![
- self.load("fonts/plex-mono/ZedPlexMono-Regular.ttf")
- .unwrap()
- .unwrap(),
+ self.load("fonts/lilex/Lilex-Regular.ttf").unwrap().unwrap(),
])
.unwrap()
}
diff --git a/crates/assistant_context/Cargo.toml b/crates/assistant_context/Cargo.toml
index 8f5ff98790f2319c398b7acf04214cd2b3f577f4..45c0072418782909829ba3186138f0c6a9456654 100644
--- a/crates/assistant_context/Cargo.toml
+++ b/crates/assistant_context/Cargo.toml
@@ -11,6 +11,9 @@ workspace = true
[lib]
path = "src/assistant_context.rs"
+[features]
+test-support = []
+
[dependencies]
agent_settings.workspace = true
anyhow.workspace = true
diff --git a/crates/assistant_context/src/assistant_context.rs b/crates/assistant_context/src/assistant_context.rs
index 557f9592e4d12e86c4e73d1bc742dfa74535d66c..12eda0954a2e1cca9ddc7df9816b8f5a37d0ce10 100644
--- a/crates/assistant_context/src/assistant_context.rs
+++ b/crates/assistant_context/src/assistant_context.rs
@@ -590,17 +590,16 @@ impl From<&Message> for MessageMetadata {
impl MessageMetadata {
pub fn is_cache_valid(&self, buffer: &BufferSnapshot, range: &Range) -> bool {
- let result = match &self.cache {
+ match &self.cache {
Some(MessageCacheMetadata { cached_at, .. }) => !buffer.has_edits_since_in_range(
- &cached_at,
+ cached_at,
Range {
start: buffer.anchor_at(range.start, Bias::Right),
end: buffer.anchor_at(range.end, Bias::Left),
},
),
_ => false,
- };
- result
+ }
}
}
@@ -1023,9 +1022,11 @@ impl AssistantContext {
summary: new_summary,
..
} => {
- if self.summary.timestamp().map_or(true, |current_timestamp| {
- new_summary.timestamp > current_timestamp
- }) {
+ if self
+ .summary
+ .timestamp()
+ .is_none_or(|current_timestamp| new_summary.timestamp > current_timestamp)
+ {
self.summary = ContextSummary::Content(new_summary);
summary_generated = true;
}
@@ -1076,20 +1077,20 @@ impl AssistantContext {
timestamp,
..
} => {
- if let Some(slash_command) = self.invoked_slash_commands.get_mut(&id) {
- if timestamp > slash_command.timestamp {
- slash_command.timestamp = timestamp;
- match error_message {
- Some(message) => {
- slash_command.status =
- InvokedSlashCommandStatus::Error(message.into());
- }
- None => {
- slash_command.status = InvokedSlashCommandStatus::Finished;
- }
+ if let Some(slash_command) = self.invoked_slash_commands.get_mut(&id)
+ && timestamp > slash_command.timestamp
+ {
+ slash_command.timestamp = timestamp;
+ match error_message {
+ Some(message) => {
+ slash_command.status =
+ InvokedSlashCommandStatus::Error(message.into());
+ }
+ None => {
+ slash_command.status = InvokedSlashCommandStatus::Finished;
}
- cx.emit(ContextEvent::InvokedSlashCommandChanged { command_id: id });
}
+ cx.emit(ContextEvent::InvokedSlashCommandChanged { command_id: id });
}
}
ContextOperation::BufferOperation(_) => unreachable!(),
@@ -1339,7 +1340,7 @@ impl AssistantContext {
let is_invalid = self
.messages_metadata
.get(&message_id)
- .map_or(true, |metadata| {
+ .is_none_or(|metadata| {
!metadata.is_cache_valid(&buffer, &message.offset_range)
|| *encountered_invalid
});
@@ -1368,10 +1369,10 @@ impl AssistantContext {
continue;
}
- if let Some(last_anchor) = last_anchor {
- if message.id == last_anchor {
- hit_last_anchor = true;
- }
+ if let Some(last_anchor) = last_anchor
+ && message.id == last_anchor
+ {
+ hit_last_anchor = true;
}
new_anchor_needs_caching = new_anchor_needs_caching
@@ -1406,14 +1407,14 @@ impl AssistantContext {
if !self.pending_completions.is_empty() {
return;
}
- if let Some(cache_configuration) = cache_configuration {
- if !cache_configuration.should_speculate {
- return;
- }
+ if let Some(cache_configuration) = cache_configuration
+ && !cache_configuration.should_speculate
+ {
+ return;
}
let request = {
- let mut req = self.to_completion_request(Some(&model), cx);
+ let mut req = self.to_completion_request(Some(model), cx);
// Skip the last message because it's likely to change and
// therefore would be a waste to cache.
req.messages.pop(); @@ -1428,7 +1429,7 @@ impl AssistantContext { let model = Arc::clone(model); self.pending_cache_warming_task = cx.spawn(async move |this, cx| { async move { - match model.stream_completion(request, &cx).await { + match model.stream_completion(request, cx).await { Ok(mut stream) => { stream.next().await; log::info!("Cache warming completed successfully"); @@ -1552,25 +1553,24 @@ impl AssistantContext { }) .map(ToOwned::to_owned) .collect::>(); - if let Some(command) = self.slash_commands.command(name, cx) { - if !command.requires_argument() || !arguments.is_empty() { - let start_ix = offset + command_line.name.start - 1; - let end_ix = offset - + command_line - .arguments - .last() - .map_or(command_line.name.end, |argument| argument.end); - let source_range = - buffer.anchor_after(start_ix)..buffer.anchor_after(end_ix); - let pending_command = ParsedSlashCommand { - name: name.to_string(), - arguments, - source_range, - status: PendingSlashCommandStatus::Idle, - }; - updated.push(pending_command.clone()); - new_commands.push(pending_command); - } + if let Some(command) = self.slash_commands.command(name, cx) + && (!command.requires_argument() || !arguments.is_empty()) + { + let start_ix = offset + command_line.name.start - 1; + let end_ix = offset + + command_line + .arguments + .last() + .map_or(command_line.name.end, |argument| argument.end); + let source_range = buffer.anchor_after(start_ix)..buffer.anchor_after(end_ix); + let pending_command = ParsedSlashCommand { + name: name.to_string(), + arguments, + source_range, + status: PendingSlashCommandStatus::Idle, + }; + updated.push(pending_command.clone()); + new_commands.push(pending_command); } } @@ -1661,12 +1661,12 @@ impl AssistantContext { ) -> Range { let buffer = self.buffer.read(cx); let start_ix = match all_annotations - .binary_search_by(|probe| probe.range().end.cmp(&range.start, &buffer)) + .binary_search_by(|probe| probe.range().end.cmp(&range.start, buffer)) { Ok(ix) | Err(ix) => ix, }; let end_ix = match all_annotations - .binary_search_by(|probe| probe.range().start.cmp(&range.end, &buffer)) + .binary_search_by(|probe| probe.range().start.cmp(&range.end, buffer)) { Ok(ix) => ix + 1, Err(ix) => ix, @@ -1799,14 +1799,13 @@ impl AssistantContext { }); let end = this.buffer.read(cx).anchor_before(insert_position); - if run_commands_in_text { - if let Some(invoked_slash_command) = + if run_commands_in_text + && let Some(invoked_slash_command) = this.invoked_slash_commands.get_mut(&command_id) - { - invoked_slash_command - .run_commands_in_ranges - .push(start..end); - } + { + invoked_slash_command + .run_commands_in_ranges + .push(start..end); } } SlashCommandEvent::EndSection => { @@ -1862,7 +1861,7 @@ impl AssistantContext { { let newline_offset = insert_position.saturating_sub(1); if buffer.contains_str_at(newline_offset, "\n") - && last_section_range.map_or(true, |last_section_range| { + && last_section_range.is_none_or(|last_section_range| { !last_section_range .to_offset(buffer) .contains(&newline_offset) @@ -2045,7 +2044,7 @@ impl AssistantContext { let task = cx.spawn({ async move |this, cx| { - let stream = model.stream_completion(request, &cx); + let stream = model.stream_completion(request, cx); let assistant_message_id = assistant_message.id; let mut response_latency = None; let stream_completion = async { @@ -2081,15 +2080,12 @@ impl AssistantContext { match event { LanguageModelCompletionEvent::StatusUpdate(status_update) => { - match status_update { - CompletionRequestStatus::UsageUpdated { 
amount, limit } => { - this.update_model_request_usage( - amount as u32, - limit, - cx, - ); - } - _ => {} + if let CompletionRequestStatus::UsageUpdated { amount, limit } = status_update { + this.update_model_request_usage( + amount as u32, + limit, + cx, + ); } } LanguageModelCompletionEvent::StartMessage { .. } => {} @@ -2286,7 +2282,7 @@ impl AssistantContext { let mut contents = self.contents(cx).peekable(); fn collect_text_content(buffer: &Buffer, range: Range) -> Option { - let text: String = buffer.text_for_range(range.clone()).collect(); + let text: String = buffer.text_for_range(range).collect(); if text.trim().is_empty() { None } else { @@ -2315,10 +2311,7 @@ impl AssistantContext { let mut request_message = LanguageModelRequestMessage { role: message.role, content: Vec::new(), - cache: message - .cache - .as_ref() - .map_or(false, |cache| cache.is_anchor), + cache: message.cache.as_ref().is_some_and(|cache| cache.is_anchor), }; while let Some(content) = contents.peek() { @@ -2708,7 +2701,7 @@ impl AssistantContext { self.summary_task = cx.spawn(async move |this, cx| { let result = async { - let stream = model.model.stream_completion_text(request, &cx); + let stream = model.model.stream_completion_text(request, cx); let mut messages = stream.await?; let mut replaced = !replace_old; @@ -2741,10 +2734,10 @@ impl AssistantContext { } this.read_with(cx, |this, _cx| { - if let Some(summary) = this.summary.content() { - if summary.text.is_empty() { - bail!("Model generated an empty summary"); - } + if let Some(summary) = this.summary.content() + && summary.text.is_empty() + { + bail!("Model generated an empty summary"); } Ok(()) })??; @@ -2799,7 +2792,7 @@ impl AssistantContext { let mut current_message = messages.next(); while let Some(offset) = offsets.next() { // Locate the message that contains the offset. - while current_message.as_ref().map_or(false, |message| { + while current_message.as_ref().is_some_and(|message| { !message.offset_range.contains(&offset) && messages.peek().is_some() }) { current_message = messages.next(); @@ -2809,7 +2802,7 @@ impl AssistantContext { }; // Skip offsets that are in the same message. 
- while offsets.peek().map_or(false, |offset| { + while offsets.peek().is_some_and(|offset| { message.offset_range.contains(offset) || messages.peek().is_none() }) { offsets.next(); @@ -2924,18 +2917,18 @@ impl AssistantContext { fs.create_dir(contexts_dir().as_ref()).await?; // rename before write ensures that only one file exists - if let Some(old_path) = old_path.as_ref() { - if new_path.as_path() != old_path.as_ref() { - fs.rename( - &old_path, - &new_path, - RenameOptions { - overwrite: true, - ignore_if_exists: true, - }, - ) - .await?; - } + if let Some(old_path) = old_path.as_ref() + && new_path.as_path() != old_path.as_ref() + { + fs.rename( + old_path, + &new_path, + RenameOptions { + overwrite: true, + ignore_if_exists: true, + }, + ) + .await?; } // update path before write in case it fails diff --git a/crates/assistant_context/src/assistant_context_tests.rs b/crates/assistant_context/src/assistant_context_tests.rs index efcad8ed9654449c747ee4853c7e7aa689c0568b..61d748cbddb0858dda2f181ea6c943426393e087 100644 --- a/crates/assistant_context/src/assistant_context_tests.rs +++ b/crates/assistant_context/src/assistant_context_tests.rs @@ -1055,7 +1055,7 @@ fn test_mark_cache_anchors(cx: &mut App) { assert_eq!( messages_cache(&context, cx) .iter() - .filter(|(_, cache)| cache.as_ref().map_or(false, |cache| cache.is_anchor)) + .filter(|(_, cache)| cache.as_ref().is_some_and(|cache| cache.is_anchor)) .count(), 0, "Empty messages should not have any cache anchors." @@ -1083,7 +1083,7 @@ fn test_mark_cache_anchors(cx: &mut App) { assert_eq!( messages_cache(&context, cx) .iter() - .filter(|(_, cache)| cache.as_ref().map_or(false, |cache| cache.is_anchor)) + .filter(|(_, cache)| cache.as_ref().is_some_and(|cache| cache.is_anchor)) .count(), 0, "Messages should not be marked for cache before going over the token minimum." @@ -1098,7 +1098,7 @@ fn test_mark_cache_anchors(cx: &mut App) { assert_eq!( messages_cache(&context, cx) .iter() - .map(|(_, cache)| cache.as_ref().map_or(false, |cache| cache.is_anchor)) + .map(|(_, cache)| cache.as_ref().is_some_and(|cache| cache.is_anchor)) .collect::>(), vec![true, true, false], "Last message should not be an anchor on speculative request." @@ -1116,7 +1116,7 @@ fn test_mark_cache_anchors(cx: &mut App) { assert_eq!( messages_cache(&context, cx) .iter() - .map(|(_, cache)| cache.as_ref().map_or(false, |cache| cache.is_anchor)) + .map(|(_, cache)| cache.as_ref().is_some_and(|cache| cache.is_anchor)) .collect::>(), vec![false, true, true, false], "Most recent message should also be cached if not a speculative request." 
@@ -1300,7 +1300,7 @@ fn test_summarize_error( context.assist(cx); }); - simulate_successful_response(&model, cx); + simulate_successful_response(model, cx); context.read_with(cx, |context, _| { assert!(!context.summary().content().unwrap().done); @@ -1321,7 +1321,7 @@ fn test_summarize_error( fn setup_context_editor_with_fake_model( cx: &mut TestAppContext, ) -> (Entity, Arc) { - let registry = Arc::new(LanguageRegistry::test(cx.executor().clone())); + let registry = Arc::new(LanguageRegistry::test(cx.executor())); let fake_provider = Arc::new(FakeLanguageModelProvider::default()); let fake_model = Arc::new(fake_provider.test_model()); @@ -1376,7 +1376,7 @@ fn messages_cache( context .read(cx) .messages(cx) - .map(|message| (message.id, message.cache.clone())) + .map(|message| (message.id, message.cache)) .collect() } @@ -1436,6 +1436,6 @@ impl SlashCommand for FakeSlashCommand { sections: vec![], run_commands_in_text: false, } - .to_event_stream())) + .into_event_stream())) } } diff --git a/crates/assistant_context/src/context_store.rs b/crates/assistant_context/src/context_store.rs index 3090a7b23439de9ae8fe1bd5287f439895f17a98..6960d9db7948e8f09ea65ede91702d98e6bc99be 100644 --- a/crates/assistant_context/src/context_store.rs +++ b/crates/assistant_context/src/context_store.rs @@ -138,6 +138,27 @@ impl ContextStore { }) } + #[cfg(any(test, feature = "test-support"))] + pub fn fake(project: Entity, cx: &mut Context) -> Self { + Self { + contexts: Default::default(), + contexts_metadata: Default::default(), + context_server_slash_command_ids: Default::default(), + host_contexts: Default::default(), + fs: project.read(cx).fs().clone(), + languages: project.read(cx).languages().clone(), + slash_commands: Arc::default(), + telemetry: project.read(cx).client().telemetry().clone(), + _watch_updates: Task::ready(None), + client: project.read(cx).client(), + project, + project_is_shared: false, + client_subscription: None, + _project_subscriptions: Default::default(), + prompt_builder: Arc::new(PromptBuilder::new(None).unwrap()), + } + } + async fn handle_advertise_contexts( this: Entity, envelope: TypedEnvelope, @@ -299,7 +320,7 @@ impl ContextStore { .client .subscribe_to_entity(remote_id) .log_err() - .map(|subscription| subscription.set_entity(&cx.entity(), &mut cx.to_async())); + .map(|subscription| subscription.set_entity(&cx.entity(), &cx.to_async())); self.advertise_contexts(cx); } else { self.client_subscription = None; @@ -768,7 +789,7 @@ impl ContextStore { let fs = self.fs.clone(); cx.spawn(async move |this, cx| { pub static ZED_STATELESS: LazyLock = - LazyLock::new(|| std::env::var("ZED_STATELESS").map_or(false, |v| !v.is_empty())); + LazyLock::new(|| std::env::var("ZED_STATELESS").is_ok_and(|v| !v.is_empty())); if *ZED_STATELESS { return Ok(()); } @@ -841,7 +862,7 @@ impl ContextStore { ContextServerStatus::Running => { self.load_context_server_slash_commands( server_id.clone(), - context_server_store.clone(), + context_server_store, cx, ); } @@ -873,34 +894,33 @@ impl ContextStore { return; }; - if protocol.capable(context_server::protocol::ServerCapability::Prompts) { - if let Some(response) = protocol + if protocol.capable(context_server::protocol::ServerCapability::Prompts) + && let Some(response) = protocol .request::(()) .await .log_err() - { - let slash_command_ids = response - .prompts - .into_iter() - .filter(assistant_slash_commands::acceptable_prompt) - .map(|prompt| { - log::info!("registering context server command: {:?}", prompt.name); - 
slash_command_working_set.insert(Arc::new( - assistant_slash_commands::ContextServerSlashCommand::new( - context_server_store.clone(), - server.id(), - prompt, - ), - )) - }) - .collect::>(); - - this.update(cx, |this, _cx| { - this.context_server_slash_command_ids - .insert(server_id.clone(), slash_command_ids); + { + let slash_command_ids = response + .prompts + .into_iter() + .filter(assistant_slash_commands::acceptable_prompt) + .map(|prompt| { + log::info!("registering context server command: {:?}", prompt.name); + slash_command_working_set.insert(Arc::new( + assistant_slash_commands::ContextServerSlashCommand::new( + context_server_store.clone(), + server.id(), + prompt, + ), + )) }) - .log_err(); - } + .collect::>(); + + this.update(cx, |this, _cx| { + this.context_server_slash_command_ids + .insert(server_id.clone(), slash_command_ids); + }) + .log_err(); } }) .detach(); diff --git a/crates/assistant_slash_command/src/assistant_slash_command.rs b/crates/assistant_slash_command/src/assistant_slash_command.rs index 828f115bf5ed8cfedf14c67243b4a8048d07ebd0..4b85fa2edf2afd6b3ea7df154b5e14ab492a8013 100644 --- a/crates/assistant_slash_command/src/assistant_slash_command.rs +++ b/crates/assistant_slash_command/src/assistant_slash_command.rs @@ -161,7 +161,7 @@ impl SlashCommandOutput { } /// Returns this [`SlashCommandOutput`] as a stream of [`SlashCommandEvent`]s. - pub fn to_event_stream(mut self) -> BoxStream<'static, Result> { + pub fn into_event_stream(mut self) -> BoxStream<'static, Result> { self.ensure_valid_section_ranges(); let mut events = Vec::new(); @@ -363,7 +363,7 @@ mod tests { run_commands_in_text: false, }; - let events = output.clone().to_event_stream().collect::>().await; + let events = output.clone().into_event_stream().collect::>().await; let events = events .into_iter() .filter_map(|event| event.ok()) @@ -386,7 +386,7 @@ mod tests { ); let new_output = - SlashCommandOutput::from_event_stream(output.clone().to_event_stream()) + SlashCommandOutput::from_event_stream(output.clone().into_event_stream()) .await .unwrap(); @@ -415,7 +415,7 @@ mod tests { run_commands_in_text: false, }; - let events = output.clone().to_event_stream().collect::>().await; + let events = output.clone().into_event_stream().collect::>().await; let events = events .into_iter() .filter_map(|event| event.ok()) @@ -452,7 +452,7 @@ mod tests { ); let new_output = - SlashCommandOutput::from_event_stream(output.clone().to_event_stream()) + SlashCommandOutput::from_event_stream(output.clone().into_event_stream()) .await .unwrap(); @@ -493,7 +493,7 @@ mod tests { run_commands_in_text: false, }; - let events = output.clone().to_event_stream().collect::>().await; + let events = output.clone().into_event_stream().collect::>().await; let events = events .into_iter() .filter_map(|event| event.ok()) @@ -562,7 +562,7 @@ mod tests { ); let new_output = - SlashCommandOutput::from_event_stream(output.clone().to_event_stream()) + SlashCommandOutput::from_event_stream(output.clone().into_event_stream()) .await .unwrap(); diff --git a/crates/assistant_slash_command/src/extension_slash_command.rs b/crates/assistant_slash_command/src/extension_slash_command.rs index 74c46ffb5ffefb2ccbefdba8edec4e9e778489b5..e47ae52c98740af17c90fe657386bb0120773d9b 100644 --- a/crates/assistant_slash_command/src/extension_slash_command.rs +++ b/crates/assistant_slash_command/src/extension_slash_command.rs @@ -166,7 +166,7 @@ impl SlashCommand for ExtensionSlashCommand { .collect(), run_commands_in_text: false, } - .to_event_stream()) 
+ .into_event_stream()) }) } } diff --git a/crates/assistant_slash_commands/Cargo.toml b/crates/assistant_slash_commands/Cargo.toml index f703a753f5d261f4151d0d6a47eb3753fd18afb8..c054c3ced84825bcd131bdd76644c00595c4c4a9 100644 --- a/crates/assistant_slash_commands/Cargo.toml +++ b/crates/assistant_slash_commands/Cargo.toml @@ -27,7 +27,6 @@ globset.workspace = true gpui.workspace = true html_to_markdown.workspace = true http_client.workspace = true -indexed_docs.workspace = true language.workspace = true project.workspace = true prompt_store.workspace = true diff --git a/crates/assistant_slash_commands/src/assistant_slash_commands.rs b/crates/assistant_slash_commands/src/assistant_slash_commands.rs index fa5dd8b683d4404365db252e27f9e8e30db6ca30..fb00a912197e07942a67ad92418b85c4920ad66b 100644 --- a/crates/assistant_slash_commands/src/assistant_slash_commands.rs +++ b/crates/assistant_slash_commands/src/assistant_slash_commands.rs @@ -3,7 +3,6 @@ mod context_server_command; mod default_command; mod delta_command; mod diagnostics_command; -mod docs_command; mod fetch_command; mod file_command; mod now_command; @@ -18,7 +17,6 @@ pub use crate::context_server_command::*; pub use crate::default_command::*; pub use crate::delta_command::*; pub use crate::diagnostics_command::*; -pub use crate::docs_command::*; pub use crate::fetch_command::*; pub use crate::file_command::*; pub use crate::now_command::*; diff --git a/crates/assistant_slash_commands/src/cargo_workspace_command.rs b/crates/assistant_slash_commands/src/cargo_workspace_command.rs index 8b088ea012de5f1ef6f7c787924c3cb2c6ec44c8..d58b2edc4c3dffd799dd9eb1c104686dc6488687 100644 --- a/crates/assistant_slash_commands/src/cargo_workspace_command.rs +++ b/crates/assistant_slash_commands/src/cargo_workspace_command.rs @@ -150,7 +150,7 @@ impl SlashCommand for CargoWorkspaceSlashCommand { }], run_commands_in_text: false, } - .to_event_stream()) + .into_event_stream()) }) }); output.unwrap_or_else(|error| Task::ready(Err(error))) diff --git a/crates/assistant_slash_commands/src/context_server_command.rs b/crates/assistant_slash_commands/src/context_server_command.rs index f223d3b184ccf6d795b80caca9a6a616aafc7f33..ee0cbf54c23a595f6503162c91dd1df3be019dd5 100644 --- a/crates/assistant_slash_commands/src/context_server_command.rs +++ b/crates/assistant_slash_commands/src/context_server_command.rs @@ -39,12 +39,12 @@ impl SlashCommand for ContextServerSlashCommand { fn label(&self, cx: &App) -> language::CodeLabel { let mut parts = vec![self.prompt.name.as_str()]; - if let Some(args) = &self.prompt.arguments { - if let Some(arg) = args.first() { - parts.push(arg.name.as_str()); - } + if let Some(args) = &self.prompt.arguments + && let Some(arg) = args.first() + { + parts.push(arg.name.as_str()); } - create_label_for_command(&parts[0], &parts[1..], cx) + create_label_for_command(parts[0], &parts[1..], cx) } fn description(&self) -> String { @@ -62,9 +62,10 @@ impl SlashCommand for ContextServerSlashCommand { } fn requires_argument(&self) -> bool { - self.prompt.arguments.as_ref().map_or(false, |args| { - args.iter().any(|arg| arg.required == Some(true)) - }) + self.prompt + .arguments + .as_ref() + .is_some_and(|args| args.iter().any(|arg| arg.required == Some(true))) } fn complete_argument( @@ -190,7 +191,7 @@ impl SlashCommand for ContextServerSlashCommand { text: prompt, run_commands_in_text: false, } - .to_event_stream()) + .into_event_stream()) }) } else { Task::ready(Err(anyhow!("Context server not found"))) diff --git 
a/crates/assistant_slash_commands/src/default_command.rs b/crates/assistant_slash_commands/src/default_command.rs index 6fce7f07a46d3d248c1c1292a67f1ad577c43645..01eff881cff0f07db9bf34e25853432e413ed79f 100644 --- a/crates/assistant_slash_commands/src/default_command.rs +++ b/crates/assistant_slash_commands/src/default_command.rs @@ -85,7 +85,7 @@ impl SlashCommand for DefaultSlashCommand { text, run_commands_in_text: true, } - .to_event_stream()) + .into_event_stream()) }) } } diff --git a/crates/assistant_slash_commands/src/delta_command.rs b/crates/assistant_slash_commands/src/delta_command.rs index 8c840c17b2c7fe9d8c8995b21c35cb35980dd71b..ea05fca588d0a496eeb3a2d2128b3861ba8a1e30 100644 --- a/crates/assistant_slash_commands/src/delta_command.rs +++ b/crates/assistant_slash_commands/src/delta_command.rs @@ -66,23 +66,22 @@ impl SlashCommand for DeltaSlashCommand { .metadata .as_ref() .and_then(|value| serde_json::from_value::(value.clone()).ok()) + && paths.insert(metadata.path.clone()) { - if paths.insert(metadata.path.clone()) { - file_command_old_outputs.push( - context_buffer - .as_rope() - .slice(section.range.to_offset(&context_buffer)), - ); - file_command_new_outputs.push(Arc::new(FileSlashCommand).run( - std::slice::from_ref(&metadata.path), - context_slash_command_output_sections, - context_buffer.clone(), - workspace.clone(), - delegate.clone(), - window, - cx, - )); - } + file_command_old_outputs.push( + context_buffer + .as_rope() + .slice(section.range.to_offset(&context_buffer)), + ); + file_command_new_outputs.push(Arc::new(FileSlashCommand).run( + std::slice::from_ref(&metadata.path), + context_slash_command_output_sections, + context_buffer.clone(), + workspace.clone(), + delegate.clone(), + window, + cx, + )); } } @@ -95,31 +94,31 @@ impl SlashCommand for DeltaSlashCommand { .into_iter() .zip(file_command_new_outputs) { - if let Ok(new_output) = new_output { - if let Ok(new_output) = SlashCommandOutput::from_event_stream(new_output).await - { - if let Some(file_command_range) = new_output.sections.first() { - let new_text = &new_output.text[file_command_range.range.clone()]; - if old_text.chars().ne(new_text.chars()) { - changes_detected = true; - output.sections.extend(new_output.sections.into_iter().map( - |section| SlashCommandOutputSection { - range: output.text.len() + section.range.start - ..output.text.len() + section.range.end, - icon: section.icon, - label: section.label, - metadata: section.metadata, - }, - )); - output.text.push_str(&new_output.text); - } - } + if let Ok(new_output) = new_output + && let Ok(new_output) = SlashCommandOutput::from_event_stream(new_output).await + && let Some(file_command_range) = new_output.sections.first() + { + let new_text = &new_output.text[file_command_range.range.clone()]; + if old_text.chars().ne(new_text.chars()) { + changes_detected = true; + output + .sections + .extend(new_output.sections.into_iter().map(|section| { + SlashCommandOutputSection { + range: output.text.len() + section.range.start + ..output.text.len() + section.range.end, + icon: section.icon, + label: section.label, + metadata: section.metadata, + } + })); + output.text.push_str(&new_output.text); } } } anyhow::ensure!(changes_detected, "no new changes detected"); - Ok(output.to_event_stream()) + Ok(output.into_event_stream()) }) } } diff --git a/crates/assistant_slash_commands/src/diagnostics_command.rs b/crates/assistant_slash_commands/src/diagnostics_command.rs index 
2feabd8b1e018cc6495a88fe5a89276e3e19dfb1..8b1dbd515cabeb498d2a639387b426527dcda651 100644 --- a/crates/assistant_slash_commands/src/diagnostics_command.rs +++ b/crates/assistant_slash_commands/src/diagnostics_command.rs @@ -44,7 +44,7 @@ impl DiagnosticsSlashCommand { score: 0., positions: Vec::new(), worktree_id: entry.worktree_id.to_usize(), - path: entry.path.clone(), + path: entry.path, path_prefix: path_prefix.clone(), is_dir: false, // Diagnostics can't be produced for directories distance_to_relative_ancestor: 0, @@ -61,7 +61,7 @@ impl DiagnosticsSlashCommand { snapshot: worktree.snapshot(), include_ignored: worktree .root_entry() - .map_or(false, |entry| entry.is_ignored), + .is_some_and(|entry| entry.is_ignored), include_root_name: true, candidates: project::Candidates::Entries, } @@ -189,7 +189,7 @@ impl SlashCommand for DiagnosticsSlashCommand { window.spawn(cx, async move |_| { task.await? - .map(|output| output.to_event_stream()) + .map(|output| output.into_event_stream()) .context("No diagnostics found") }) } @@ -249,7 +249,7 @@ fn collect_diagnostics( let worktree = worktree.read(cx); let worktree_root_path = Path::new(worktree.root_name()); let relative_path = path.strip_prefix(worktree_root_path).ok()?; - worktree.absolutize(&relative_path).ok() + worktree.absolutize(relative_path).ok() }) }) .is_some() @@ -280,10 +280,10 @@ fn collect_diagnostics( let mut project_summary = DiagnosticSummary::default(); for (project_path, path, summary) in diagnostic_summaries { - if let Some(path_matcher) = &options.path_matcher { - if !path_matcher.is_match(&path) { - continue; - } + if let Some(path_matcher) = &options.path_matcher + && !path_matcher.is_match(&path) + { + continue; } project_summary.error_count += summary.error_count; @@ -365,7 +365,7 @@ pub fn collect_buffer_diagnostics( ) { for (_, group) in snapshot.diagnostic_groups(None) { let entry = &group.entries[group.primary_ix]; - collect_diagnostic(output, entry, &snapshot, include_warnings) + collect_diagnostic(output, entry, snapshot, include_warnings) } } @@ -396,7 +396,7 @@ fn collect_diagnostic( let start_row = range.start.row.saturating_sub(EXCERPT_EXPANSION_SIZE); let end_row = (range.end.row + EXCERPT_EXPANSION_SIZE).min(snapshot.max_point().row) + 1; let excerpt_range = - Point::new(start_row, 0).to_offset(&snapshot)..Point::new(end_row, 0).to_offset(&snapshot); + Point::new(start_row, 0).to_offset(snapshot)..Point::new(end_row, 0).to_offset(snapshot); output.text.push_str("```"); if let Some(language_name) = snapshot.language().map(|l| l.code_fence_block_name()) { diff --git a/crates/assistant_slash_commands/src/docs_command.rs b/crates/assistant_slash_commands/src/docs_command.rs deleted file mode 100644 index bd87c72849e1eb54ca782d978f319676c1e8b3fe..0000000000000000000000000000000000000000 --- a/crates/assistant_slash_commands/src/docs_command.rs +++ /dev/null @@ -1,543 +0,0 @@ -use std::path::Path; -use std::sync::Arc; -use std::sync::atomic::AtomicBool; -use std::time::Duration; - -use anyhow::{Context as _, Result, anyhow, bail}; -use assistant_slash_command::{ - ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, - SlashCommandResult, -}; -use gpui::{App, BackgroundExecutor, Entity, Task, WeakEntity}; -use indexed_docs::{ - DocsDotRsProvider, IndexedDocsRegistry, IndexedDocsStore, LocalRustdocProvider, PackageName, - ProviderId, -}; -use language::{BufferSnapshot, LspAdapterDelegate}; -use project::{Project, ProjectPath}; -use ui::prelude::*; -use util::{ResultExt, maybe}; -use 
workspace::Workspace; - -pub struct DocsSlashCommand; - -impl DocsSlashCommand { - pub const NAME: &'static str = "docs"; - - fn path_to_cargo_toml(project: Entity, cx: &mut App) -> Option> { - let worktree = project.read(cx).worktrees(cx).next()?; - let worktree = worktree.read(cx); - let entry = worktree.entry_for_path("Cargo.toml")?; - let path = ProjectPath { - worktree_id: worktree.id(), - path: entry.path.clone(), - }; - Some(Arc::from( - project.read(cx).absolute_path(&path, cx)?.as_path(), - )) - } - - /// Ensures that the indexed doc providers for Rust are registered. - /// - /// Ideally we would do this sooner, but we need to wait until we're able to - /// access the workspace so we can read the project. - fn ensure_rust_doc_providers_are_registered( - &self, - workspace: Option>, - cx: &mut App, - ) { - let indexed_docs_registry = IndexedDocsRegistry::global(cx); - if indexed_docs_registry - .get_provider_store(LocalRustdocProvider::id()) - .is_none() - { - let index_provider_deps = maybe!({ - let workspace = workspace - .as_ref() - .context("no workspace")? - .upgrade() - .context("workspace dropped")?; - let project = workspace.read(cx).project().clone(); - let fs = project.read(cx).fs().clone(); - let cargo_workspace_root = Self::path_to_cargo_toml(project, cx) - .and_then(|path| path.parent().map(|path| path.to_path_buf())) - .context("no Cargo workspace root found")?; - - anyhow::Ok((fs, cargo_workspace_root)) - }); - - if let Some((fs, cargo_workspace_root)) = index_provider_deps.log_err() { - indexed_docs_registry.register_provider(Box::new(LocalRustdocProvider::new( - fs, - cargo_workspace_root, - ))); - } - } - - if indexed_docs_registry - .get_provider_store(DocsDotRsProvider::id()) - .is_none() - { - let http_client = maybe!({ - let workspace = workspace - .as_ref() - .context("no workspace")? - .upgrade() - .context("workspace was dropped")?; - let project = workspace.read(cx).project().clone(); - anyhow::Ok(project.read(cx).client().http_client()) - }); - - if let Some(http_client) = http_client.log_err() { - indexed_docs_registry - .register_provider(Box::new(DocsDotRsProvider::new(http_client))); - } - } - } - - /// Runs just-in-time indexing for a given package, in case the slash command - /// is run without any entries existing in the index. - fn run_just_in_time_indexing( - store: Arc, - key: String, - package: PackageName, - executor: BackgroundExecutor, - ) -> Task<()> { - executor.clone().spawn(async move { - let (prefix, needs_full_index) = if let Some((prefix, _)) = key.split_once('*') { - // If we have a wildcard in the search, we want to wait until - // we've completely finished indexing so we get a full set of - // results for the wildcard. - (prefix.to_string(), true) - } else { - (key, false) - }; - - // If we already have some entries, we assume that we've indexed the package before - // and don't need to do it again. 
- let has_any_entries = store - .any_with_prefix(prefix.clone()) - .await - .unwrap_or_default(); - if has_any_entries { - return (); - }; - - let index_task = store.clone().index(package.clone()); - - if needs_full_index { - _ = index_task.await; - } else { - loop { - executor.timer(Duration::from_millis(200)).await; - - if store - .any_with_prefix(prefix.clone()) - .await - .unwrap_or_default() - || !store.is_indexing(&package) - { - break; - } - } - } - }) - } -} - -impl SlashCommand for DocsSlashCommand { - fn name(&self) -> String { - Self::NAME.into() - } - - fn description(&self) -> String { - "insert docs".into() - } - - fn menu_text(&self) -> String { - "Insert Documentation".into() - } - - fn requires_argument(&self) -> bool { - true - } - - fn complete_argument( - self: Arc, - arguments: &[String], - _cancel: Arc, - workspace: Option>, - _: &mut Window, - cx: &mut App, - ) -> Task>> { - self.ensure_rust_doc_providers_are_registered(workspace, cx); - - let indexed_docs_registry = IndexedDocsRegistry::global(cx); - let args = DocsSlashCommandArgs::parse(arguments); - let store = args - .provider() - .context("no docs provider specified") - .and_then(|provider| IndexedDocsStore::try_global(provider, cx)); - cx.background_spawn(async move { - fn build_completions(items: Vec) -> Vec { - items - .into_iter() - .map(|item| ArgumentCompletion { - label: item.clone().into(), - new_text: item.to_string(), - after_completion: assistant_slash_command::AfterCompletion::Run, - replace_previous_arguments: false, - }) - .collect() - } - - match args { - DocsSlashCommandArgs::NoProvider => { - let providers = indexed_docs_registry.list_providers(); - if providers.is_empty() { - return Ok(vec![ArgumentCompletion { - label: "No available docs providers.".into(), - new_text: String::new(), - after_completion: false.into(), - replace_previous_arguments: false, - }]); - } - - Ok(providers - .into_iter() - .map(|provider| ArgumentCompletion { - label: provider.to_string().into(), - new_text: provider.to_string(), - after_completion: false.into(), - replace_previous_arguments: false, - }) - .collect()) - } - DocsSlashCommandArgs::SearchPackageDocs { - provider, - package, - index, - } => { - let store = store?; - - if index { - // We don't need to hold onto this task, as the `IndexedDocsStore` will hold it - // until it completes. - drop(store.clone().index(package.as_str().into())); - } - - let suggested_packages = store.clone().suggest_packages().await?; - let search_results = store.search(package).await; - - let mut items = build_completions(search_results); - let workspace_crate_completions = suggested_packages - .into_iter() - .filter(|package_name| { - !items - .iter() - .any(|item| item.label.text() == package_name.as_ref()) - }) - .map(|package_name| ArgumentCompletion { - label: format!("{package_name} (unindexed)").into(), - new_text: format!("{package_name}"), - after_completion: true.into(), - replace_previous_arguments: false, - }) - .collect::>(); - items.extend(workspace_crate_completions); - - if items.is_empty() { - return Ok(vec![ArgumentCompletion { - label: format!( - "Enter a {package_term} name.", - package_term = package_term(&provider) - ) - .into(), - new_text: provider.to_string(), - after_completion: false.into(), - replace_previous_arguments: false, - }]); - } - - Ok(items) - } - DocsSlashCommandArgs::SearchItemDocs { item_path, .. 
} => { - let store = store?; - let items = store.search(item_path).await; - Ok(build_completions(items)) - } - } - }) - } - - fn run( - self: Arc, - arguments: &[String], - _context_slash_command_output_sections: &[SlashCommandOutputSection], - _context_buffer: BufferSnapshot, - _workspace: WeakEntity, - _delegate: Option>, - _: &mut Window, - cx: &mut App, - ) -> Task { - if arguments.is_empty() { - return Task::ready(Err(anyhow!("missing an argument"))); - }; - - let args = DocsSlashCommandArgs::parse(arguments); - let executor = cx.background_executor().clone(); - let task = cx.background_spawn({ - let store = args - .provider() - .context("no docs provider specified") - .and_then(|provider| IndexedDocsStore::try_global(provider, cx)); - async move { - let (provider, key) = match args.clone() { - DocsSlashCommandArgs::NoProvider => bail!("no docs provider specified"), - DocsSlashCommandArgs::SearchPackageDocs { - provider, package, .. - } => (provider, package), - DocsSlashCommandArgs::SearchItemDocs { - provider, - item_path, - .. - } => (provider, item_path), - }; - - if key.trim().is_empty() { - bail!( - "no {package_term} name provided", - package_term = package_term(&provider) - ); - } - - let store = store?; - - if let Some(package) = args.package() { - Self::run_just_in_time_indexing(store.clone(), key.clone(), package, executor) - .await; - } - - let (text, ranges) = if let Some((prefix, _)) = key.split_once('*') { - let docs = store.load_many_by_prefix(prefix.to_string()).await?; - - let mut text = String::new(); - let mut ranges = Vec::new(); - - for (key, docs) in docs { - let prev_len = text.len(); - - text.push_str(&docs.0); - text.push_str("\n"); - ranges.push((key, prev_len..text.len())); - text.push_str("\n"); - } - - (text, ranges) - } else { - let item_docs = store.load(key.clone()).await?; - let text = item_docs.to_string(); - let range = 0..text.len(); - - (text, vec![(key, range)]) - }; - - anyhow::Ok((provider, text, ranges)) - } - }); - - cx.foreground_executor().spawn(async move { - let (provider, text, ranges) = task.await?; - Ok(SlashCommandOutput { - text, - sections: ranges - .into_iter() - .map(|(key, range)| SlashCommandOutputSection { - range, - icon: IconName::FileDoc, - label: format!("docs ({provider}): {key}",).into(), - metadata: None, - }) - .collect(), - run_commands_in_text: false, - } - .to_event_stream()) - }) - } -} - -fn is_item_path_delimiter(char: char) -> bool { - !char.is_alphanumeric() && char != '-' && char != '_' -} - -#[derive(Debug, PartialEq, Clone)] -pub enum DocsSlashCommandArgs { - NoProvider, - SearchPackageDocs { - provider: ProviderId, - package: String, - index: bool, - }, - SearchItemDocs { - provider: ProviderId, - package: String, - item_path: String, - }, -} - -impl DocsSlashCommandArgs { - pub fn parse(arguments: &[String]) -> Self { - let Some(provider) = arguments - .get(0) - .cloned() - .filter(|arg| !arg.trim().is_empty()) - else { - return Self::NoProvider; - }; - let provider = ProviderId(provider.into()); - let Some(argument) = arguments.get(1) else { - return Self::NoProvider; - }; - - if let Some((package, rest)) = argument.split_once(is_item_path_delimiter) { - if rest.trim().is_empty() { - Self::SearchPackageDocs { - provider, - package: package.to_owned(), - index: true, - } - } else { - Self::SearchItemDocs { - provider, - package: package.to_owned(), - item_path: argument.to_owned(), - } - } - } else { - Self::SearchPackageDocs { - provider, - package: argument.to_owned(), - index: false, - } - } - } - - pub 
fn provider(&self) -> Option { - match self { - Self::NoProvider => None, - Self::SearchPackageDocs { provider, .. } | Self::SearchItemDocs { provider, .. } => { - Some(provider.clone()) - } - } - } - - pub fn package(&self) -> Option { - match self { - Self::NoProvider => None, - Self::SearchPackageDocs { package, .. } | Self::SearchItemDocs { package, .. } => { - Some(package.as_str().into()) - } - } - } -} - -/// Returns the term used to refer to a package. -fn package_term(provider: &ProviderId) -> &'static str { - if provider == &DocsDotRsProvider::id() || provider == &LocalRustdocProvider::id() { - return "crate"; - } - - "package" -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_parse_docs_slash_command_args() { - assert_eq!( - DocsSlashCommandArgs::parse(&["".to_string()]), - DocsSlashCommandArgs::NoProvider - ); - assert_eq!( - DocsSlashCommandArgs::parse(&["rustdoc".to_string()]), - DocsSlashCommandArgs::NoProvider - ); - - assert_eq!( - DocsSlashCommandArgs::parse(&["rustdoc".to_string(), "".to_string()]), - DocsSlashCommandArgs::SearchPackageDocs { - provider: ProviderId("rustdoc".into()), - package: "".into(), - index: false - } - ); - assert_eq!( - DocsSlashCommandArgs::parse(&["gleam".to_string(), "".to_string()]), - DocsSlashCommandArgs::SearchPackageDocs { - provider: ProviderId("gleam".into()), - package: "".into(), - index: false - } - ); - - assert_eq!( - DocsSlashCommandArgs::parse(&["rustdoc".to_string(), "gpui".to_string()]), - DocsSlashCommandArgs::SearchPackageDocs { - provider: ProviderId("rustdoc".into()), - package: "gpui".into(), - index: false, - } - ); - assert_eq!( - DocsSlashCommandArgs::parse(&["gleam".to_string(), "gleam_stdlib".to_string()]), - DocsSlashCommandArgs::SearchPackageDocs { - provider: ProviderId("gleam".into()), - package: "gleam_stdlib".into(), - index: false - } - ); - - // Adding an item path delimiter indicates we can start indexing. 
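The assertions that follow exercise the delimiter rule this comment names. A self-contained sketch of the same rule, with a simplified Parsed enum standing in for DocsSlashCommandArgs and the provider argument omitted:

#[derive(Debug, PartialEq)]
enum Parsed {
    Package { name: String, index: bool },
    Item { package: String, item_path: String },
}

fn is_delimiter(c: char) -> bool {
    !c.is_alphanumeric() && c != '-' && c != '_'
}

fn parse(argument: &str) -> Parsed {
    match argument.split_once(is_delimiter) {
        // A trailing delimiter ("gpui:") names the package and starts indexing.
        Some((package, rest)) if rest.trim().is_empty() => Parsed::Package {
            name: package.to_owned(),
            index: true,
        },
        // Anything after the delimiter is an item path within the package.
        Some((package, _)) => Parsed::Item {
            package: package.to_owned(),
            item_path: argument.to_owned(),
        },
        // No delimiter yet: search packages, but do not index.
        None => Parsed::Package {
            name: argument.to_owned(),
            index: false,
        },
    }
}

fn main() {
    assert_eq!(parse("gpui"), Parsed::Package { name: "gpui".into(), index: false });
    assert_eq!(parse("gpui:"), Parsed::Package { name: "gpui".into(), index: true });
    assert!(matches!(parse("gpui::foo::Bar"), Parsed::Item { .. }));
    println!("delimiter rule holds");
}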
- assert_eq!( - DocsSlashCommandArgs::parse(&["rustdoc".to_string(), "gpui:".to_string()]), - DocsSlashCommandArgs::SearchPackageDocs { - provider: ProviderId("rustdoc".into()), - package: "gpui".into(), - index: true, - } - ); - assert_eq!( - DocsSlashCommandArgs::parse(&["gleam".to_string(), "gleam_stdlib/".to_string()]), - DocsSlashCommandArgs::SearchPackageDocs { - provider: ProviderId("gleam".into()), - package: "gleam_stdlib".into(), - index: true - } - ); - - assert_eq!( - DocsSlashCommandArgs::parse(&[ - "rustdoc".to_string(), - "gpui::foo::bar::Baz".to_string() - ]), - DocsSlashCommandArgs::SearchItemDocs { - provider: ProviderId("rustdoc".into()), - package: "gpui".into(), - item_path: "gpui::foo::bar::Baz".into() - } - ); - assert_eq!( - DocsSlashCommandArgs::parse(&[ - "gleam".to_string(), - "gleam_stdlib/gleam/int".to_string() - ]), - DocsSlashCommandArgs::SearchItemDocs { - provider: ProviderId("gleam".into()), - package: "gleam_stdlib".into(), - item_path: "gleam_stdlib/gleam/int".into() - } - ); - } -} diff --git a/crates/assistant_slash_commands/src/fetch_command.rs b/crates/assistant_slash_commands/src/fetch_command.rs index 5e586d4f23aba71a23c9c550cb63bf26059ffa6e..6d3f66c9a23c896c765ba6c0a43b7a99dbc7ee73 100644 --- a/crates/assistant_slash_commands/src/fetch_command.rs +++ b/crates/assistant_slash_commands/src/fetch_command.rs @@ -112,7 +112,7 @@ impl SlashCommand for FetchSlashCommand { } fn icon(&self) -> IconName { - IconName::Globe + IconName::ToolWeb } fn menu_text(&self) -> String { @@ -171,13 +171,13 @@ impl SlashCommand for FetchSlashCommand { text, sections: vec![SlashCommandOutputSection { range, - icon: IconName::Globe, + icon: IconName::ToolWeb, label: format!("fetch {}", url).into(), metadata: None, }], run_commands_in_text: false, } - .to_event_stream()) + .into_event_stream()) }) } } diff --git a/crates/assistant_slash_commands/src/file_command.rs b/crates/assistant_slash_commands/src/file_command.rs index c913ccc0f199cb5d03cf0a91d67459f3728b55a9..a973d653e4527be808618f76d60af59e4a891947 100644 --- a/crates/assistant_slash_commands/src/file_command.rs +++ b/crates/assistant_slash_commands/src/file_command.rs @@ -92,7 +92,7 @@ impl FileSlashCommand { snapshot: worktree.snapshot(), include_ignored: worktree .root_entry() - .map_or(false, |entry| entry.is_ignored), + .is_some_and(|entry| entry.is_ignored), include_root_name: true, candidates: project::Candidates::Entries, } @@ -223,7 +223,7 @@ fn collect_files( cx: &mut App, ) -> impl Stream> + use<> { let Ok(matchers) = glob_inputs - .into_iter() + .iter() .map(|glob_input| { custom_path_matcher::PathMatcher::new(&[glob_input.to_owned()]) .with_context(|| format!("invalid path {glob_input}")) @@ -371,7 +371,7 @@ fn collect_files( &mut output, ) .log_err(); - let mut buffer_events = output.to_event_stream(); + let mut buffer_events = output.into_event_stream(); while let Some(event) = buffer_events.next().await { events_tx.unbounded_send(event)?; } @@ -379,7 +379,7 @@ fn collect_files( } } - while let Some(_) = directory_stack.pop() { + while directory_stack.pop().is_some() { events_tx.unbounded_send(Ok(SlashCommandEvent::EndSection))?; } } @@ -491,7 +491,7 @@ mod custom_path_matcher { impl PathMatcher { pub fn new(globs: &[String]) -> Result { let globs = globs - .into_iter() + .iter() .map(|glob| Glob::new(&SanitizedPath::from(glob).to_glob_string())) .collect::, _>>()?; let sources = globs.iter().map(|glob| glob.glob().to_owned()).collect(); @@ -536,7 +536,7 @@ mod custom_path_matcher { let path_str = 
path.to_string_lossy(); let separator = std::path::MAIN_SEPARATOR_STR; if path_str.ends_with(separator) { - return false; + false } else { self.glob.is_match(path_str.to_string() + separator) } diff --git a/crates/assistant_slash_commands/src/now_command.rs b/crates/assistant_slash_commands/src/now_command.rs index e4abef2a7c80fbdc96df28cbd1072d180fd864f3..aec21e7173bafd4cb07e7c37135fa0ad6fa88812 100644 --- a/crates/assistant_slash_commands/src/now_command.rs +++ b/crates/assistant_slash_commands/src/now_command.rs @@ -66,6 +66,6 @@ impl SlashCommand for NowSlashCommand { }], run_commands_in_text: false, } - .to_event_stream())) + .into_event_stream())) } } diff --git a/crates/assistant_slash_commands/src/prompt_command.rs b/crates/assistant_slash_commands/src/prompt_command.rs index c177f9f3599525924aa18700ea09d5fe977a5698..bbd6d3e3ad201c06940d6dc986616f61c8e15547 100644 --- a/crates/assistant_slash_commands/src/prompt_command.rs +++ b/crates/assistant_slash_commands/src/prompt_command.rs @@ -80,7 +80,7 @@ impl SlashCommand for PromptSlashCommand { }; let store = PromptStore::global(cx); - let title = SharedString::from(title.clone()); + let title = SharedString::from(title); let prompt = cx.spawn({ let title = title.clone(); async move |cx| { @@ -117,7 +117,7 @@ impl SlashCommand for PromptSlashCommand { }], run_commands_in_text: true, } - .to_event_stream()) + .into_event_stream()) }) } } diff --git a/crates/assistant_slash_commands/src/symbols_command.rs b/crates/assistant_slash_commands/src/symbols_command.rs index ef9314643116689d36e99b2a9bcb7d69982a776f..30287091444db391056a77664890229df95a1d46 100644 --- a/crates/assistant_slash_commands/src/symbols_command.rs +++ b/crates/assistant_slash_commands/src/symbols_command.rs @@ -92,7 +92,7 @@ impl SlashCommand for OutlineSlashCommand { text: outline_text, run_commands_in_text: false, } - .to_event_stream()) + .into_event_stream()) }) }); diff --git a/crates/assistant_slash_commands/src/tab_command.rs b/crates/assistant_slash_commands/src/tab_command.rs index ca7601bc4c3a48d9d9c352ad545d72c032e7c47e..a124beed6302d6c67085ccb70f4c3aa58834d3f2 100644 --- a/crates/assistant_slash_commands/src/tab_command.rs +++ b/crates/assistant_slash_commands/src/tab_command.rs @@ -157,7 +157,7 @@ impl SlashCommand for TabSlashCommand { for (full_path, buffer, _) in tab_items_search.await? 
{ append_buffer_to_output(&buffer, full_path.as_deref(), &mut output).log_err(); } - Ok(output.to_event_stream()) + Ok(output.into_event_stream()) }) } } @@ -195,16 +195,14 @@ fn tab_items_for_queries( } for editor in workspace.items_of_type::(cx) { - if let Some(buffer) = editor.read(cx).buffer().read(cx).as_singleton() { - if let Some(timestamp) = + if let Some(buffer) = editor.read(cx).buffer().read(cx).as_singleton() + && let Some(timestamp) = timestamps_by_entity_id.get(&editor.entity_id()) - { - if visited_buffers.insert(buffer.read(cx).remote_id()) { - let snapshot = buffer.read(cx).snapshot(); - let full_path = snapshot.resolve_file_path(cx, true); - open_buffers.push((full_path, snapshot, *timestamp)); - } - } + && visited_buffers.insert(buffer.read(cx).remote_id()) + { + let snapshot = buffer.read(cx).snapshot(); + let full_path = snapshot.resolve_file_path(cx, true); + open_buffers.push((full_path, snapshot, *timestamp)); } } diff --git a/crates/assistant_tool/Cargo.toml b/crates/assistant_tool/Cargo.toml index acbe674b02cfe31a08f63e01f7dae1a2448c453e..c95695052a4778209010b2f9e7a4a57be4cb6cf7 100644 --- a/crates/assistant_tool/Cargo.toml +++ b/crates/assistant_tool/Cargo.toml @@ -12,12 +12,10 @@ workspace = true path = "src/assistant_tool.rs" [dependencies] +action_log.workspace = true anyhow.workspace = true -buffer_diff.workspace = true -clock.workspace = true collections.workspace = true derive_more.workspace = true -futures.workspace = true gpui.workspace = true icons.workspace = true language.workspace = true @@ -30,7 +28,6 @@ serde.workspace = true serde_json.workspace = true text.workspace = true util.workspace = true -watch.workspace = true workspace.workspace = true workspace-hack.workspace = true diff --git a/crates/assistant_tool/src/assistant_tool.rs b/crates/assistant_tool/src/assistant_tool.rs index 22cbaac3f8b0df95df3c14a6237092cf83ae35ac..9c5825d0f0ecc9c31277bfff5123d3d80501511b 100644 --- a/crates/assistant_tool/src/assistant_tool.rs +++ b/crates/assistant_tool/src/assistant_tool.rs @@ -1,4 +1,3 @@ -mod action_log; pub mod outline; mod tool_registry; mod tool_schema; @@ -10,6 +9,7 @@ use std::fmt::Formatter; use std::ops::Deref; use std::sync::Arc; +use action_log::ActionLog; use anyhow::Result; use gpui::AnyElement; use gpui::AnyWindowHandle; @@ -25,7 +25,6 @@ use language_model::LanguageModelToolSchemaFormat; use project::Project; use workspace::Workspace; -pub use crate::action_log::*; pub use crate::tool_registry::*; pub use crate::tool_schema::*; pub use crate::tool_working_set::*; diff --git a/crates/assistant_tool/src/outline.rs b/crates/assistant_tool/src/outline.rs index 6af204d79ab53e626cb92bee68db6e346bb2250a..4f8bde5456073912185fe160d48363eac7601ef5 100644 --- a/crates/assistant_tool/src/outline.rs +++ b/crates/assistant_tool/src/outline.rs @@ -1,4 +1,4 @@ -use crate::ActionLog; +use action_log::ActionLog; use anyhow::{Context as _, Result}; use gpui::{AsyncApp, Entity}; use language::{OutlineItem, ParseStatus}; diff --git a/crates/assistant_tool/src/tool_schema.rs b/crates/assistant_tool/src/tool_schema.rs index 7b48f93ba6d23bcc1a6e2cf051737efaf69fa595..192f7c8a2bb565ece01a3472a9e46dad316377f4 100644 --- a/crates/assistant_tool/src/tool_schema.rs +++ b/crates/assistant_tool/src/tool_schema.rs @@ -24,16 +24,16 @@ pub fn adapt_schema_to_format( fn preprocess_json_schema(json: &mut Value) -> Result<()> { // `additionalProperties` defaults to `false` unless explicitly specified. // This prevents models from hallucinating tool parameters. 
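A rough illustration of what this preprocessing guarantees (the let-chain rewrite follows below): every "type": "object" schema gains additionalProperties: false, so models cannot invent parameters, plus an empty properties map for the OpenAI API. serde_json only; the real function also returns a Result:

use serde_json::{Value, json};

// Mirrors the check below: object schemas are closed off against extra
// properties and always expose a `properties` map.
fn preprocess(schema: &mut Value) {
    if let Value::Object(obj) = schema {
        if matches!(obj.get("type"), Some(Value::String(s)) if s == "object") {
            if !obj.contains_key("additionalProperties") {
                obj.insert("additionalProperties".to_string(), Value::Bool(false));
            }
            if !obj.contains_key("properties") {
                obj.insert("properties".to_string(), Value::Object(Default::default()));
            }
        }
    }
}

fn main() {
    let mut schema = json!({ "type": "object" });
    preprocess(&mut schema);
    assert_eq!(
        schema,
        json!({ "type": "object", "additionalProperties": false, "properties": {} })
    );
}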
- if let Value::Object(obj) = json { - if matches!(obj.get("type"), Some(Value::String(s)) if s == "object") { - if !obj.contains_key("additionalProperties") { - obj.insert("additionalProperties".to_string(), Value::Bool(false)); - } + if let Value::Object(obj) = json + && matches!(obj.get("type"), Some(Value::String(s)) if s == "object") + { + if !obj.contains_key("additionalProperties") { + obj.insert("additionalProperties".to_string(), Value::Bool(false)); + } - // OpenAI API requires non-missing `properties` - if !obj.contains_key("properties") { - obj.insert("properties".to_string(), Value::Object(Default::default())); - } + // OpenAI API requires non-missing `properties` + if !obj.contains_key("properties") { + obj.insert("properties".to_string(), Value::Object(Default::default())); } } Ok(()) @@ -59,10 +59,10 @@ fn adapt_to_json_schema_subset(json: &mut Value) -> Result<()> { ("optional", |value| value.is_boolean()), ]; for (key, predicate) in KEYS_TO_REMOVE { - if let Some(value) = obj.get(key) { - if predicate(value) { - obj.remove(key); - } + if let Some(value) = obj.get(key) + && predicate(value) + { + obj.remove(key); } } @@ -77,12 +77,12 @@ fn adapt_to_json_schema_subset(json: &mut Value) -> Result<()> { } // Handle oneOf -> anyOf conversion - if let Some(subschemas) = obj.get_mut("oneOf") { - if subschemas.is_array() { - let subschemas_clone = subschemas.clone(); - obj.remove("oneOf"); - obj.insert("anyOf".to_string(), subschemas_clone); - } + if let Some(subschemas) = obj.get_mut("oneOf") + && subschemas.is_array() + { + let subschemas_clone = subschemas.clone(); + obj.remove("oneOf"); + obj.insert("anyOf".to_string(), subschemas_clone); } // Recursively process all nested objects and arrays diff --git a/crates/assistant_tool/src/tool_working_set.rs b/crates/assistant_tool/src/tool_working_set.rs index c0a358917b499908d85fbc157212cf6db5b5e0eb..61f57affc76aad9e4d2185665b539f9092e3491c 100644 --- a/crates/assistant_tool/src/tool_working_set.rs +++ b/crates/assistant_tool/src/tool_working_set.rs @@ -156,13 +156,13 @@ fn resolve_context_server_tool_name_conflicts( if duplicated_tool_names.is_empty() { return context_server_tools - .into_iter() + .iter() .map(|tool| (resolve_tool_name(tool).into(), tool.clone())) .collect(); } context_server_tools - .into_iter() + .iter() .filter_map(|tool| { let mut tool_name = resolve_tool_name(tool); if !duplicated_tool_names.contains(&tool_name) { diff --git a/crates/assistant_tools/Cargo.toml b/crates/assistant_tools/Cargo.toml index d4b8fa3afc3dc3311599a2d9e3e97f2984ebde40..5a8ca8a5e995fd2c738eb3b309f2bb4ebe9595a1 100644 --- a/crates/assistant_tools/Cargo.toml +++ b/crates/assistant_tools/Cargo.toml @@ -15,6 +15,7 @@ path = "src/assistant_tools.rs" eval = [] [dependencies] +action_log.workspace = true agent_settings.workspace = true anyhow.workspace = true assistant_tool.workspace = true diff --git a/crates/assistant_tools/src/assistant_tools.rs b/crates/assistant_tools/src/assistant_tools.rs index 90bb2e9b7c0d937ef4cb0d844f02e90babbe819f..ce3b639cb2c46d3f736490c0b2153260f970963c 100644 --- a/crates/assistant_tools/src/assistant_tools.rs +++ b/crates/assistant_tools/src/assistant_tools.rs @@ -2,7 +2,7 @@ mod copy_path_tool; mod create_directory_tool; mod delete_path_tool; mod diagnostics_tool; -mod edit_agent; +pub mod edit_agent; mod edit_file_tool; mod fetch_tool; mod find_path_tool; @@ -14,7 +14,7 @@ mod open_tool; mod project_notifications_tool; mod read_file_tool; mod schema; -mod templates; +pub mod templates; mod terminal_tool; mod 
thinking_tool; mod ui; @@ -72,11 +72,10 @@ pub fn init(http_client: Arc, cx: &mut App) { register_web_search_tool(&LanguageModelRegistry::global(cx), cx); cx.subscribe( &LanguageModelRegistry::global(cx), - move |registry, event, cx| match event { - language_model::Event::DefaultModelChanged => { + move |registry, event, cx| { + if let language_model::Event::DefaultModelChanged = event { register_web_search_tool(®istry, cx); } - _ => {} }, ) .detach(); @@ -86,7 +85,7 @@ fn register_web_search_tool(registry: &Entity, cx: &mut A let using_zed_provider = registry .read(cx) .default_model() - .map_or(false, |default| default.is_provided_by_zed()); + .is_some_and(|default| default.is_provided_by_zed()); if using_zed_provider { ToolRegistry::global(cx).register_tool(WebSearchTool); } else { diff --git a/crates/assistant_tools/src/copy_path_tool.rs b/crates/assistant_tools/src/copy_path_tool.rs index e34ae9ff9305689593241b45fe986414a211ec3b..c56a864bd45efd83d605607962f6103f8da7d1da 100644 --- a/crates/assistant_tools/src/copy_path_tool.rs +++ b/crates/assistant_tools/src/copy_path_tool.rs @@ -1,6 +1,7 @@ use crate::schema::json_schema_for; +use action_log::ActionLog; use anyhow::{Context as _, Result, anyhow}; -use assistant_tool::{ActionLog, Tool, ToolResult}; +use assistant_tool::{Tool, ToolResult}; use gpui::AnyWindowHandle; use gpui::{App, AppContext, Entity, Task}; use language_model::LanguageModel; diff --git a/crates/assistant_tools/src/create_directory_tool.rs b/crates/assistant_tools/src/create_directory_tool.rs index 11d969d234228e32a0f4baff2c9cad055a488993..85eea463dc1dfd429dd70ded8c18faf6ee8421c5 100644 --- a/crates/assistant_tools/src/create_directory_tool.rs +++ b/crates/assistant_tools/src/create_directory_tool.rs @@ -1,6 +1,7 @@ use crate::schema::json_schema_for; +use action_log::ActionLog; use anyhow::{Context as _, Result, anyhow}; -use assistant_tool::{ActionLog, Tool, ToolResult}; +use assistant_tool::{Tool, ToolResult}; use gpui::AnyWindowHandle; use gpui::{App, Entity, Task}; use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; diff --git a/crates/assistant_tools/src/delete_path_tool.rs b/crates/assistant_tools/src/delete_path_tool.rs index 9e69c18b65d2f78618ac54b28c4808401c08bd72..b181eeff5ca0f1a45176921ed9e24973aae3839f 100644 --- a/crates/assistant_tools/src/delete_path_tool.rs +++ b/crates/assistant_tools/src/delete_path_tool.rs @@ -1,6 +1,7 @@ use crate::schema::json_schema_for; +use action_log::ActionLog; use anyhow::{Context as _, Result, anyhow}; -use assistant_tool::{ActionLog, Tool, ToolResult}; +use assistant_tool::{Tool, ToolResult}; use futures::{SinkExt, StreamExt, channel::mpsc}; use gpui::{AnyWindowHandle, App, AppContext, Entity, Task}; use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; diff --git a/crates/assistant_tools/src/diagnostics_tool.rs b/crates/assistant_tools/src/diagnostics_tool.rs index 12ab97f820d89e2d66deba3b58ab388b7f1c886e..4ec794e12783746e4e330e79f0c0cb14c84f5d2e 100644 --- a/crates/assistant_tools/src/diagnostics_tool.rs +++ b/crates/assistant_tools/src/diagnostics_tool.rs @@ -1,6 +1,7 @@ use crate::schema::json_schema_for; +use action_log::ActionLog; use anyhow::{Result, anyhow}; -use assistant_tool::{ActionLog, Tool, ToolResult}; +use assistant_tool::{Tool, ToolResult}; use gpui::{AnyWindowHandle, App, Entity, Task}; use language::{DiagnosticSeverity, OffsetRangeExt}; use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; @@ 
-85,7 +86,7 @@ impl Tool for DiagnosticsTool { input: serde_json::Value, _request: Arc, project: Entity, - action_log: Entity, + _action_log: Entity, _model: Arc, _window: Option, cx: &mut App, @@ -158,10 +159,6 @@ impl Tool for DiagnosticsTool { } } - action_log.update(cx, |action_log, _cx| { - action_log.checked_project_diagnostics(); - }); - if has_diagnostics { Task::ready(Ok(output.into())).into() } else { diff --git a/crates/assistant_tools/src/edit_agent.rs b/crates/assistant_tools/src/edit_agent.rs index 715d106a267e4013955c29613262cde9b92d0a8a..665ece2baaeed0dac32e5c0153ec1d79fef47f12 100644 --- a/crates/assistant_tools/src/edit_agent.rs +++ b/crates/assistant_tools/src/edit_agent.rs @@ -5,8 +5,8 @@ mod evals; mod streaming_fuzzy_matcher; use crate::{Template, Templates}; +use action_log::ActionLog; use anyhow::Result; -use assistant_tool::ActionLog; use cloud_llm_client::CompletionIntent; use create_file_parser::{CreateFileParser, CreateFileParserEvent}; pub use edit_parser::EditFormat; @@ -29,7 +29,6 @@ use serde::{Deserialize, Serialize}; use std::{cmp, iter, mem, ops::Range, path::PathBuf, pin::Pin, sync::Arc, task::Poll}; use streaming_diff::{CharOperation, StreamingDiff}; use streaming_fuzzy_matcher::StreamingFuzzyMatcher; -use util::debug_panic; #[derive(Serialize)] struct CreateFilePromptTemplate { @@ -66,7 +65,7 @@ pub enum EditAgentOutputEvent { ResolvingEditRange(Range), UnresolvedEditRange, AmbiguousEditRange(Vec>), - Edited, + Edited(Range), } #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] @@ -179,7 +178,9 @@ impl EditAgent { ) }); output_events_tx - .unbounded_send(EditAgentOutputEvent::Edited) + .unbounded_send(EditAgentOutputEvent::Edited( + language::Anchor::MIN..language::Anchor::MAX, + )) .ok(); })?; @@ -201,7 +202,9 @@ impl EditAgent { }); })?; output_events_tx - .unbounded_send(EditAgentOutputEvent::Edited) + .unbounded_send(EditAgentOutputEvent::Edited( + language::Anchor::MIN..language::Anchor::MAX, + )) .ok(); } } @@ -337,8 +340,8 @@ impl EditAgent { // Edit the buffer and report edits to the action log as part of the // same effect cycle, otherwise the edit will be reported as if the // user made it. 
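Context for the hunk below: EditAgentOutputEvent::Edited now carries the edited range, computed as the span from the smallest edit start to the largest edit end. A toy version of that bookkeeping, with plain usize offsets standing in for buffer anchors:

use std::ops::Range;

// Report one range covering everything the batch of edits touched.
fn edited_span(edits: &[(Range<usize>, &str)]) -> Option<Range<usize>> {
    let min_start = edits.iter().map(|(range, _)| range.start).min()?;
    let max_end = edits.iter().map(|(range, _)| range.end).max()?;
    Some(min_start..max_end)
}

fn main() {
    // Three edits: replace 10..14, replace 3..5, insert at 20.
    let edits = [(10..14, "foo"), (3..5, "ba"), (20..20, "r")];
    assert_eq!(edited_span(&edits), Some(3..20));
    println!("reported edited range: {:?}", edited_span(&edits));
}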
- cx.update(|cx| { - let max_edit_end = buffer.update(cx, |buffer, cx| { + let (min_edit_start, max_edit_end) = cx.update(|cx| { + let (min_edit_start, max_edit_end) = buffer.update(cx, |buffer, cx| { buffer.edit(edits.iter().cloned(), None, cx); let max_edit_end = buffer .summaries_for_anchors::( @@ -346,7 +349,16 @@ impl EditAgent { ) .max() .unwrap(); - buffer.anchor_before(max_edit_end) + let min_edit_start = buffer + .summaries_for_anchors::( + edits.iter().map(|(range, _)| &range.start), + ) + .min() + .unwrap(); + ( + buffer.anchor_after(min_edit_start), + buffer.anchor_before(max_edit_end), + ) }); self.action_log .update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); @@ -359,9 +371,10 @@ impl EditAgent { cx, ); }); + (min_edit_start, max_edit_end) })?; output_events - .unbounded_send(EditAgentOutputEvent::Edited) + .unbounded_send(EditAgentOutputEvent::Edited(min_edit_start..max_edit_end)) .ok(); } @@ -659,34 +672,30 @@ impl EditAgent { cx: &mut AsyncApp, ) -> Result>> { let mut messages_iter = conversation.messages.iter_mut(); - if let Some(last_message) = messages_iter.next_back() { - if last_message.role == Role::Assistant { - let old_content_len = last_message.content.len(); - last_message - .content - .retain(|content| !matches!(content, MessageContent::ToolUse(_))); - let new_content_len = last_message.content.len(); - - // We just removed pending tool uses from the content of the - // last message, so it doesn't make sense to cache it anymore - // (e.g., the message will look very different on the next - // request). Thus, we move the flag to the message prior to it, - // as it will still be a valid prefix of the conversation. - if old_content_len != new_content_len && last_message.cache { - if let Some(prev_message) = messages_iter.next_back() { - last_message.cache = false; - prev_message.cache = true; - } - } + if let Some(last_message) = messages_iter.next_back() + && last_message.role == Role::Assistant + { + let old_content_len = last_message.content.len(); + last_message + .content + .retain(|content| !matches!(content, MessageContent::ToolUse(_))); + let new_content_len = last_message.content.len(); + + // We just removed pending tool uses from the content of the + // last message, so it doesn't make sense to cache it anymore + // (e.g., the message will look very different on the next + // request). Thus, we move the flag to the message prior to it, + // as it will still be a valid prefix of the conversation. + if old_content_len != new_content_len + && last_message.cache + && let Some(prev_message) = messages_iter.next_back() + { + last_message.cache = false; + prev_message.cache = true; + } - if last_message.content.is_empty() { - conversation.messages.pop(); - } - } else { - debug_panic!( - "Last message must be an Assistant tool calling! 
Got {:?}", - last_message.content - ); + if last_message.content.is_empty() { + conversation.messages.pop(); } } @@ -761,6 +770,7 @@ mod tests { use gpui::{AppContext, TestAppContext}; use indoc::indoc; use language_model::fake_provider::FakeLanguageModel; + use pretty_assertions::assert_matches; use project::{AgentLocation, Project}; use rand::prelude::*; use rand::rngs::StdRng; @@ -998,7 +1008,10 @@ mod tests { model.send_last_completion_stream_text_chunk("abX"); cx.run_until_parked(); - assert_eq!(drain_events(&mut events), [EditAgentOutputEvent::Edited]); + assert_matches!( + drain_events(&mut events).as_slice(), + [EditAgentOutputEvent::Edited(_)] + ); assert_eq!( buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), "abXc\ndef\nghi\njkl" @@ -1013,7 +1026,10 @@ mod tests { model.send_last_completion_stream_text_chunk("cY"); cx.run_until_parked(); - assert_eq!(drain_events(&mut events), [EditAgentOutputEvent::Edited]); + assert_matches!( + drain_events(&mut events).as_slice(), + [EditAgentOutputEvent::Edited { .. }] + ); assert_eq!( buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), "abXcY\ndef\nghi\njkl" @@ -1124,9 +1140,9 @@ mod tests { model.send_last_completion_stream_text_chunk("GHI"); cx.run_until_parked(); - assert_eq!( - drain_events(&mut events), - vec![EditAgentOutputEvent::Edited] + assert_matches!( + drain_events(&mut events).as_slice(), + [EditAgentOutputEvent::Edited { .. }] ); assert_eq!( buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), @@ -1171,9 +1187,9 @@ mod tests { ); cx.run_until_parked(); - assert_eq!( - drain_events(&mut events), - vec![EditAgentOutputEvent::Edited] + assert_matches!( + drain_events(&mut events).as_slice(), + [EditAgentOutputEvent::Edited(_)] ); assert_eq!( buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), @@ -1189,9 +1205,9 @@ mod tests { chunks_tx.unbounded_send("```\njkl\n").unwrap(); cx.run_until_parked(); - assert_eq!( - drain_events(&mut events), - vec![EditAgentOutputEvent::Edited] + assert_matches!( + drain_events(&mut events).as_slice(), + [EditAgentOutputEvent::Edited { .. }] ); assert_eq!( buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), @@ -1207,9 +1223,9 @@ mod tests { chunks_tx.unbounded_send("mno\n").unwrap(); cx.run_until_parked(); - assert_eq!( - drain_events(&mut events), - vec![EditAgentOutputEvent::Edited] + assert_matches!( + drain_events(&mut events).as_slice(), + [EditAgentOutputEvent::Edited { .. 
}] ); assert_eq!( buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), @@ -1225,9 +1241,9 @@ mod tests { chunks_tx.unbounded_send("pqr\n```").unwrap(); cx.run_until_parked(); - assert_eq!( - drain_events(&mut events), - vec![EditAgentOutputEvent::Edited] + assert_matches!( + drain_events(&mut events).as_slice(), + [EditAgentOutputEvent::Edited(_)], ); assert_eq!( buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), diff --git a/crates/assistant_tools/src/edit_agent/create_file_parser.rs b/crates/assistant_tools/src/edit_agent/create_file_parser.rs index 07c8fac7b9d4a0346f8f90c9906086df1b1fd849..0aad9ecb87c1426486b531ac4291913cd0d74092 100644 --- a/crates/assistant_tools/src/edit_agent/create_file_parser.rs +++ b/crates/assistant_tools/src/edit_agent/create_file_parser.rs @@ -1,10 +1,11 @@ +use std::sync::OnceLock; + use regex::Regex; use smallvec::SmallVec; -use std::cell::LazyCell; use util::debug_panic; -const START_MARKER: LazyCell = LazyCell::new(|| Regex::new(r"\n?```\S*\n").unwrap()); -const END_MARKER: LazyCell = LazyCell::new(|| Regex::new(r"(^|\n)```\s*$").unwrap()); +static START_MARKER: OnceLock = OnceLock::new(); +static END_MARKER: OnceLock = OnceLock::new(); #[derive(Debug)] pub enum CreateFileParserEvent { @@ -43,10 +44,12 @@ impl CreateFileParser { self.buffer.push_str(chunk); let mut edit_events = SmallVec::new(); + let start_marker_regex = START_MARKER.get_or_init(|| Regex::new(r"\n?```\S*\n").unwrap()); + let end_marker_regex = END_MARKER.get_or_init(|| Regex::new(r"(^|\n)```\s*$").unwrap()); loop { match &mut self.state { ParserState::Pending => { - if let Some(m) = START_MARKER.find(&self.buffer) { + if let Some(m) = start_marker_regex.find(&self.buffer) { self.buffer.drain(..m.end()); self.state = ParserState::WithinText; } else { @@ -65,7 +68,7 @@ impl CreateFileParser { break; } ParserState::Finishing => { - if let Some(m) = END_MARKER.find(&self.buffer) { + if let Some(m) = end_marker_regex.find(&self.buffer) { self.buffer.drain(m.start()..); } if !self.buffer.is_empty() { diff --git a/crates/assistant_tools/src/edit_agent/evals.rs b/crates/assistant_tools/src/edit_agent/evals.rs index 9a8e7624559e9a1284ace7c932f428c7389b6254..4f182b31481c5d855b59f4398e104d0eea05bc74 100644 --- a/crates/assistant_tools/src/edit_agent/evals.rs +++ b/crates/assistant_tools/src/edit_agent/evals.rs @@ -1153,8 +1153,7 @@ impl EvalInput { .expect("Conversation must end with an edit_file tool use") .clone(); - let edit_file_input: EditFileToolInput = - serde_json::from_value(tool_use.input.clone()).unwrap(); + let edit_file_input: EditFileToolInput = serde_json::from_value(tool_use.input).unwrap(); EvalInput { conversation, @@ -1283,14 +1282,14 @@ impl EvalAssertion { // Parse the score from the response let re = regex::Regex::new(r"(\d+)").unwrap(); - if let Some(captures) = re.captures(&output) { - if let Some(score_match) = captures.get(1) { - let score = score_match.as_str().parse().unwrap_or(0); - return Ok(EvalAssertionOutcome { - score, - message: Some(output), - }); - } + if let Some(captures) = re.captures(&output) + && let Some(score_match) = captures.get(1) + { + let score = score_match.as_str().parse().unwrap_or(0); + return Ok(EvalAssertionOutcome { + score, + message: Some(output), + }); } anyhow::bail!("No score found in response. 
Raw output: {output}"); @@ -1460,7 +1459,7 @@ impl EditAgentTest { async fn new(cx: &mut TestAppContext) -> Self { cx.executor().allow_parking(); - let fs = FakeFs::new(cx.executor().clone()); + let fs = FakeFs::new(cx.executor()); cx.update(|cx| { settings::init(cx); gpui_tokio::init(cx); @@ -1475,7 +1474,7 @@ impl EditAgentTest { Project::init_settings(cx); language::init(cx); language_model::init(client.clone(), cx); - language_models::init(user_store.clone(), client.clone(), cx); + language_models::init(user_store, client.clone(), cx); crate::init(client.http_client(), cx); }); @@ -1586,7 +1585,7 @@ impl EditAgentTest { let has_system_prompt = eval .conversation .first() - .map_or(false, |msg| msg.role == Role::System); + .is_some_and(|msg| msg.role == Role::System); let messages = if has_system_prompt { eval.conversation } else { diff --git a/crates/assistant_tools/src/edit_agent/streaming_fuzzy_matcher.rs b/crates/assistant_tools/src/edit_agent/streaming_fuzzy_matcher.rs index 092bdce8b347ee5bcb5849703533710652b5b01c..33b37679f0a345ef070942057b307bd377012d05 100644 --- a/crates/assistant_tools/src/edit_agent/streaming_fuzzy_matcher.rs +++ b/crates/assistant_tools/src/edit_agent/streaming_fuzzy_matcher.rs @@ -319,7 +319,7 @@ mod tests { ); let snapshot = buffer.snapshot(); - let mut finder = StreamingFuzzyMatcher::new(snapshot.clone()); + let mut finder = StreamingFuzzyMatcher::new(snapshot); assert_eq!(push(&mut finder, ""), None); assert_eq!(finish(finder), None); } @@ -333,7 +333,7 @@ mod tests { ); let snapshot = buffer.snapshot(); - let mut finder = StreamingFuzzyMatcher::new(snapshot.clone()); + let mut finder = StreamingFuzzyMatcher::new(snapshot); // Push partial query assert_eq!(push(&mut finder, "This"), None); @@ -365,7 +365,7 @@ mod tests { ); let snapshot = buffer.snapshot(); - let mut finder = StreamingFuzzyMatcher::new(snapshot.clone()); + let mut finder = StreamingFuzzyMatcher::new(snapshot); // Push a fuzzy query that should match the first function assert_eq!( @@ -391,7 +391,7 @@ mod tests { ); let snapshot = buffer.snapshot(); - let mut finder = StreamingFuzzyMatcher::new(snapshot.clone()); + let mut finder = StreamingFuzzyMatcher::new(snapshot); // No match initially assert_eq!(push(&mut finder, "Lin"), None); @@ -420,7 +420,7 @@ mod tests { ); let snapshot = buffer.snapshot(); - let mut finder = StreamingFuzzyMatcher::new(snapshot.clone()); + let mut finder = StreamingFuzzyMatcher::new(snapshot); // Push text in small chunks across line boundaries assert_eq!(push(&mut finder, "jumps "), None); // No newline yet @@ -458,7 +458,7 @@ mod tests { ); let snapshot = buffer.snapshot(); - let mut finder = StreamingFuzzyMatcher::new(snapshot.clone()); + let mut finder = StreamingFuzzyMatcher::new(snapshot); assert_eq!( push(&mut finder, "impl Debug for User {\n"), @@ -711,7 +711,7 @@ mod tests { "Expected to match `second_function` based on the line hint" ); - let mut matcher = StreamingFuzzyMatcher::new(snapshot.clone()); + let mut matcher = StreamingFuzzyMatcher::new(snapshot); matcher.push(query, None); matcher.finish(); let best_match = matcher.select_best_match(); @@ -727,7 +727,7 @@ mod tests { let buffer = TextBuffer::new(0, BufferId::new(1).unwrap(), text.clone()); let snapshot = buffer.snapshot(); - let mut matcher = StreamingFuzzyMatcher::new(snapshot.clone()); + let mut matcher = StreamingFuzzyMatcher::new(snapshot); // Split query into random chunks let chunks = to_random_chunks(rng, query); @@ -794,10 +794,8 @@ mod tests { fn finish(mut finder: 
StreamingFuzzyMatcher) -> Option { let snapshot = finder.snapshot.clone(); let matches = finder.finish(); - if let Some(range) = matches.first() { - Some(snapshot.text_for_range(range.clone()).collect::()) - } else { - None - } + matches + .first() + .map(|range| snapshot.text_for_range(range.clone()).collect::()) } } diff --git a/crates/assistant_tools/src/edit_file_tool.rs b/crates/assistant_tools/src/edit_file_tool.rs index dce9f49abdde7e0ea00d976a4d9f029f98f7a067..95b01c40eb96472caf85f239b2212f25e06fe9e2 100644 --- a/crates/assistant_tools/src/edit_file_tool.rs +++ b/crates/assistant_tools/src/edit_file_tool.rs @@ -4,11 +4,11 @@ use crate::{ schema::json_schema_for, ui::{COLLAPSED_LINES, ToolOutputPreview}, }; +use action_log::ActionLog; use agent_settings; use anyhow::{Context as _, Result, anyhow}; use assistant_tool::{ - ActionLog, AnyToolCard, Tool, ToolCard, ToolResult, ToolResultContent, ToolResultOutput, - ToolUseStatus, + AnyToolCard, Tool, ToolCard, ToolResult, ToolResultContent, ToolResultOutput, ToolUseStatus, }; use buffer_diff::{BufferDiff, BufferDiffSnapshot}; use editor::{Editor, EditorMode, MinimapVisibility, MultiBuffer, PathKey}; @@ -155,10 +155,10 @@ impl Tool for EditFileTool { // It's also possible that the global config dir is configured to be inside the project, // so check for that edge case too. - if let Ok(canonical_path) = std::fs::canonicalize(&input.path) { - if canonical_path.starts_with(paths::config_dir()) { - return true; - } + if let Ok(canonical_path) = std::fs::canonicalize(&input.path) + && canonical_path.starts_with(paths::config_dir()) + { + return true; } // Check if path is inside the global config directory @@ -199,10 +199,10 @@ impl Tool for EditFileTool { .any(|c| c.as_os_str() == local_settings_folder.as_os_str()) { description.push_str(" (local settings)"); - } else if let Ok(canonical_path) = std::fs::canonicalize(&input.path) { - if canonical_path.starts_with(paths::config_dir()) { - description.push_str(" (global settings)"); - } + } else if let Ok(canonical_path) = std::fs::canonicalize(&input.path) + && canonical_path.starts_with(paths::config_dir()) + { + description.push_str(" (global settings)"); } description @@ -307,7 +307,7 @@ impl Tool for EditFileTool { let mut ambiguous_ranges = Vec::new(); while let Some(event) = events.next().await { match event { - EditAgentOutputEvent::Edited => { + EditAgentOutputEvent::Edited { .. 
} => { if let Some(card) = card_clone.as_ref() { card.update(cx, |card, cx| card.update_diff(cx))?; } @@ -376,7 +376,7 @@ impl Tool for EditFileTool { let output = EditFileToolOutput { original_path: project_path.path.to_path_buf(), - new_text: new_text.clone(), + new_text, old_text, raw_output: Some(agent_output), }; @@ -536,7 +536,7 @@ fn resolve_path( let parent_entry = parent_project_path .as_ref() - .and_then(|path| project.entry_for_path(&path, cx)) + .and_then(|path| project.entry_for_path(path, cx)) .context("Can't create file: parent directory doesn't exist")?; anyhow::ensure!( @@ -643,7 +643,7 @@ impl EditFileToolCard { diff }); - self.buffer = Some(buffer.clone()); + self.buffer = Some(buffer); self.base_text = Some(base_text.into()); self.buffer_diff = Some(buffer_diff.clone()); @@ -723,13 +723,13 @@ impl EditFileToolCard { let buffer = buffer.read(cx); let diff = diff.read(cx); let mut ranges = diff - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer, cx) - .map(|diff_hunk| diff_hunk.buffer_range.to_point(&buffer)) + .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer, cx) + .map(|diff_hunk| diff_hunk.buffer_range.to_point(buffer)) .collect::>(); ranges.extend( self.revealed_ranges .iter() - .map(|range| range.to_point(&buffer)), + .map(|range| range.to_point(buffer)), ); ranges.sort_unstable_by_key(|range| (range.start, Reverse(range.end))); @@ -776,7 +776,6 @@ impl EditFileToolCard { let buffer_diff = cx.spawn({ let buffer = buffer.clone(); - let language_registry = language_registry.clone(); async move |_this, cx| { build_buffer_diff(base_text, &buffer, &language_registry, cx).await } @@ -857,13 +856,12 @@ impl ToolCard for EditFileToolCard { ) .child( Icon::new(IconName::ArrowUpRight) - .size(IconSize::XSmall) + .size(IconSize::Small) .color(Color::Ignored), ), ) .on_click({ let path = self.path.clone(); - let workspace = workspace.clone(); move |_, window, cx| { workspace .update(cx, { @@ -1356,8 +1354,7 @@ mod tests { mode: mode.clone(), }; - let result = cx.update(|cx| resolve_path(&input, project, cx)); - result + cx.update(|cx| resolve_path(&input, project, cx)) } fn assert_resolved_path_eq(path: anyhow::Result, expected: &str) { diff --git a/crates/assistant_tools/src/fetch_tool.rs b/crates/assistant_tools/src/fetch_tool.rs index a31ec39268d7afcf6072a783c5faa40f2a2e0f78..79e205f205d02ba2a3f977163d2296423f71d9da 100644 --- a/crates/assistant_tools/src/fetch_tool.rs +++ b/crates/assistant_tools/src/fetch_tool.rs @@ -3,8 +3,9 @@ use std::sync::Arc; use std::{borrow::Cow, cell::RefCell}; use crate::schema::json_schema_for; +use action_log::ActionLog; use anyhow::{Context as _, Result, anyhow, bail}; -use assistant_tool::{ActionLog, Tool, ToolResult}; +use assistant_tool::{Tool, ToolResult}; use futures::AsyncReadExt as _; use gpui::{AnyWindowHandle, App, AppContext as _, Entity, Task}; use html_to_markdown::{TagHandler, convert_html_to_markdown, markdown}; diff --git a/crates/assistant_tools/src/find_path_tool.rs b/crates/assistant_tools/src/find_path_tool.rs index affc01941735e5e97b3c3a509cacbce5fd3c7264..ac2c7a32abc0a3768caee85cfd25ab109f03aab3 100644 --- a/crates/assistant_tools/src/find_path_tool.rs +++ b/crates/assistant_tools/src/find_path_tool.rs @@ -1,7 +1,8 @@ use crate::{schema::json_schema_for, ui::ToolCallCardHeader}; +use action_log::ActionLog; use anyhow::{Result, anyhow}; use assistant_tool::{ - ActionLog, Tool, ToolCard, ToolResult, ToolResultContent, ToolResultOutput, ToolUseStatus, + Tool, ToolCard, ToolResult, ToolResultContent, 
ToolResultOutput, ToolUseStatus, }; use editor::Editor; use futures::channel::oneshot::{self, Receiver}; @@ -233,7 +234,7 @@ impl ToolCard for FindPathToolCard { workspace: WeakEntity, cx: &mut Context, ) -> impl IntoElement { - let matches_label: SharedString = if self.paths.len() == 0 { + let matches_label: SharedString = if self.paths.is_empty() { "No matches".into() } else if self.paths.len() == 1 { "1 match".into() @@ -257,7 +258,7 @@ impl ToolCard for FindPathToolCard { Button::new(("path", index), button_label) .icon(IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) + .icon_size(IconSize::Small) .icon_position(IconPosition::End) .label_size(LabelSize::Small) .color(Color::Muted) diff --git a/crates/assistant_tools/src/grep_tool.rs b/crates/assistant_tools/src/grep_tool.rs index 43c3d1d9904e486a9a4309ff24a9e6d4be0dfdca..41dde5bbfe49ad5bd82fdb5072c14f5728c518c5 100644 --- a/crates/assistant_tools/src/grep_tool.rs +++ b/crates/assistant_tools/src/grep_tool.rs @@ -1,6 +1,7 @@ use crate::schema::json_schema_for; +use action_log::ActionLog; use anyhow::{Result, anyhow}; -use assistant_tool::{ActionLog, Tool, ToolResult}; +use assistant_tool::{Tool, ToolResult}; use futures::StreamExt; use gpui::{AnyWindowHandle, App, Entity, Task}; use language::{OffsetRangeExt, ParseStatus, Point}; @@ -187,15 +188,14 @@ impl Tool for GrepTool { // Check if this file should be excluded based on its worktree settings if let Ok(Some(project_path)) = project.read_with(cx, |project, cx| { project.find_project_path(&path, cx) - }) { - if cx.update(|cx| { + }) + && cx.update(|cx| { let worktree_settings = WorktreeSettings::get(Some((&project_path).into()), cx); worktree_settings.is_path_excluded(&project_path.path) || worktree_settings.is_path_private(&project_path.path) }).unwrap_or(false) { continue; } - } while *parse_status.borrow() != ParseStatus::Idle { parse_status.changed().await?; @@ -283,12 +283,11 @@ impl Tool for GrepTool { output.extend(snapshot.text_for_range(range)); output.push_str("\n```\n"); - if let Some(ancestor_range) = ancestor_range { - if end_row < ancestor_range.end.row { + if let Some(ancestor_range) = ancestor_range + && end_row < ancestor_range.end.row { let remaining_lines = ancestor_range.end.row - end_row; writeln!(output, "\n{} lines remaining in ancestor node. 
Read the file to see all.", remaining_lines)?; } - } matches_found += 1; } @@ -328,7 +327,7 @@ mod tests { init_test(cx); cx.executor().allow_parking(); - let fs = FakeFs::new(cx.executor().clone()); + let fs = FakeFs::new(cx.executor()); fs.insert_tree( path!("/root"), serde_json::json!({ @@ -416,7 +415,7 @@ mod tests { init_test(cx); cx.executor().allow_parking(); - let fs = FakeFs::new(cx.executor().clone()); + let fs = FakeFs::new(cx.executor()); fs.insert_tree( path!("/root"), serde_json::json!({ @@ -495,7 +494,7 @@ mod tests { init_test(cx); cx.executor().allow_parking(); - let fs = FakeFs::new(cx.executor().clone()); + let fs = FakeFs::new(cx.executor()); // Create test file with syntax structures fs.insert_tree( @@ -893,7 +892,7 @@ mod tests { }) .await; let results = result.unwrap(); - let paths = extract_paths_from_results(&results.content.as_str().unwrap()); + let paths = extract_paths_from_results(results.content.as_str().unwrap()); assert!( paths.is_empty(), "grep_tool should not find files outside the project worktree" @@ -919,7 +918,7 @@ mod tests { }) .await; let results = result.unwrap(); - let paths = extract_paths_from_results(&results.content.as_str().unwrap()); + let paths = extract_paths_from_results(results.content.as_str().unwrap()); assert!( paths.iter().any(|p| p.contains("allowed_file.rs")), "grep_tool should be able to search files inside worktrees" @@ -945,7 +944,7 @@ mod tests { }) .await; let results = result.unwrap(); - let paths = extract_paths_from_results(&results.content.as_str().unwrap()); + let paths = extract_paths_from_results(results.content.as_str().unwrap()); assert!( paths.is_empty(), "grep_tool should not search files in .secretdir (file_scan_exclusions)" @@ -970,7 +969,7 @@ mod tests { }) .await; let results = result.unwrap(); - let paths = extract_paths_from_results(&results.content.as_str().unwrap()); + let paths = extract_paths_from_results(results.content.as_str().unwrap()); assert!( paths.is_empty(), "grep_tool should not search .mymetadata files (file_scan_exclusions)" @@ -996,7 +995,7 @@ mod tests { }) .await; let results = result.unwrap(); - let paths = extract_paths_from_results(&results.content.as_str().unwrap()); + let paths = extract_paths_from_results(results.content.as_str().unwrap()); assert!( paths.is_empty(), "grep_tool should not search .mysecrets (private_files)" @@ -1021,7 +1020,7 @@ mod tests { }) .await; let results = result.unwrap(); - let paths = extract_paths_from_results(&results.content.as_str().unwrap()); + let paths = extract_paths_from_results(results.content.as_str().unwrap()); assert!( paths.is_empty(), "grep_tool should not search .privatekey files (private_files)" @@ -1046,7 +1045,7 @@ mod tests { }) .await; let results = result.unwrap(); - let paths = extract_paths_from_results(&results.content.as_str().unwrap()); + let paths = extract_paths_from_results(results.content.as_str().unwrap()); assert!( paths.is_empty(), "grep_tool should not search .mysensitive files (private_files)" @@ -1072,7 +1071,7 @@ mod tests { }) .await; let results = result.unwrap(); - let paths = extract_paths_from_results(&results.content.as_str().unwrap()); + let paths = extract_paths_from_results(results.content.as_str().unwrap()); assert!( paths.iter().any(|p| p.contains("normal_file.rs")), "Should be able to search normal files" @@ -1099,7 +1098,7 @@ mod tests { }) .await; let results = result.unwrap(); - let paths = extract_paths_from_results(&results.content.as_str().unwrap()); + let paths = 
extract_paths_from_results(results.content.as_str().unwrap()); assert!( paths.is_empty(), "grep_tool should not allow escaping project boundaries with relative paths" @@ -1205,7 +1204,7 @@ mod tests { .unwrap(); let content = result.content.as_str().unwrap(); - let paths = extract_paths_from_results(&content); + let paths = extract_paths_from_results(content); // Should find matches in non-private files assert!( @@ -1270,7 +1269,7 @@ mod tests { .unwrap(); let content = result.content.as_str().unwrap(); - let paths = extract_paths_from_results(&content); + let paths = extract_paths_from_results(content); // Should only find matches in worktree1 *.rs files (excluding private ones) assert!( diff --git a/crates/assistant_tools/src/list_directory_tool.rs b/crates/assistant_tools/src/list_directory_tool.rs index b1980615d677894264ce2f785068b9e99cb55a61..5471d8923b557ac26d06a16c90fdeffb152049d1 100644 --- a/crates/assistant_tools/src/list_directory_tool.rs +++ b/crates/assistant_tools/src/list_directory_tool.rs @@ -1,6 +1,7 @@ use crate::schema::json_schema_for; +use action_log::ActionLog; use anyhow::{Result, anyhow}; -use assistant_tool::{ActionLog, Tool, ToolResult}; +use assistant_tool::{Tool, ToolResult}; use gpui::{AnyWindowHandle, App, Entity, Task}; use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; use project::{Project, WorktreeSettings}; diff --git a/crates/assistant_tools/src/move_path_tool.rs b/crates/assistant_tools/src/move_path_tool.rs index c1cbbf848d53d4e0341bad84fbc7d8cf90f142ac..2c065488cea62a73e04c34a659961abc7b94ba54 100644 --- a/crates/assistant_tools/src/move_path_tool.rs +++ b/crates/assistant_tools/src/move_path_tool.rs @@ -1,6 +1,7 @@ use crate::schema::json_schema_for; +use action_log::ActionLog; use anyhow::{Context as _, Result, anyhow}; -use assistant_tool::{ActionLog, Tool, ToolResult}; +use assistant_tool::{Tool, ToolResult}; use gpui::{AnyWindowHandle, App, AppContext, Entity, Task}; use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; use project::Project; diff --git a/crates/assistant_tools/src/now_tool.rs b/crates/assistant_tools/src/now_tool.rs index b51b91d3d51b6cc15e54faab55be50287815d96c..f50ad065d1cd320aa1a82e4ce17f744d6b04be2c 100644 --- a/crates/assistant_tools/src/now_tool.rs +++ b/crates/assistant_tools/src/now_tool.rs @@ -1,8 +1,9 @@ use std::sync::Arc; use crate::schema::json_schema_for; +use action_log::ActionLog; use anyhow::{Result, anyhow}; -use assistant_tool::{ActionLog, Tool, ToolResult}; +use assistant_tool::{Tool, ToolResult}; use chrono::{Local, Utc}; use gpui::{AnyWindowHandle, App, Entity, Task}; use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; diff --git a/crates/assistant_tools/src/open_tool.rs b/crates/assistant_tools/src/open_tool.rs index 8fddbb0431aee7c8d9d7508c535b598887998225..6dbf66749b932804df6e00fed726360f0492c7f3 100644 --- a/crates/assistant_tools/src/open_tool.rs +++ b/crates/assistant_tools/src/open_tool.rs @@ -1,6 +1,7 @@ use crate::schema::json_schema_for; +use action_log::ActionLog; use anyhow::{Context as _, Result, anyhow}; -use assistant_tool::{ActionLog, Tool, ToolResult}; +use assistant_tool::{Tool, ToolResult}; use gpui::{AnyWindowHandle, App, AppContext, Entity, Task}; use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; use project::Project; diff --git a/crates/assistant_tools/src/project_notifications_tool.rs 
b/crates/assistant_tools/src/project_notifications_tool.rs index 03487e5419002f0fe08458c49e325f7202612d29..e30d80207dae4de1e69efe99724a2a5343b57664 100644 --- a/crates/assistant_tools/src/project_notifications_tool.rs +++ b/crates/assistant_tools/src/project_notifications_tool.rs @@ -1,6 +1,7 @@ use crate::schema::json_schema_for; +use action_log::ActionLog; use anyhow::Result; -use assistant_tool::{ActionLog, Tool, ToolResult}; +use assistant_tool::{Tool, ToolResult}; use gpui::{AnyWindowHandle, App, Entity, Task}; use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; use project::Project; @@ -80,7 +81,7 @@ fn fit_patch_to_size(patch: &str, max_size: usize) -> String { // Compression level 1: remove context lines in diff bodies, but // leave the counts and positions of inserted/deleted lines let mut current_size = patch.len(); - let mut file_patches = split_patch(&patch); + let mut file_patches = split_patch(patch); file_patches.sort_by_key(|patch| patch.len()); let compressed_patches = file_patches .iter() diff --git a/crates/assistant_tools/src/read_file_tool.rs b/crates/assistant_tools/src/read_file_tool.rs index ee38273cc04338180d36eb1b64e78dd0235ccfa0..766ee3b1611ccc9093fbe9299644413877afd991 100644 --- a/crates/assistant_tools/src/read_file_tool.rs +++ b/crates/assistant_tools/src/read_file_tool.rs @@ -1,6 +1,7 @@ use crate::schema::json_schema_for; +use action_log::ActionLog; use anyhow::{Context as _, Result, anyhow}; -use assistant_tool::{ActionLog, Tool, ToolResult}; +use assistant_tool::{Tool, ToolResult}; use assistant_tool::{ToolResultContent, outline}; use gpui::{AnyWindowHandle, App, Entity, Task}; use project::{ImageItem, image_store}; @@ -200,7 +201,7 @@ impl Tool for ReadFileTool { buffer .file() .as_ref() - .map_or(true, |file| !file.disk_state().exists()) + .is_none_or(|file| !file.disk_state().exists()) })? { anyhow::bail!("{file_path} not found"); } @@ -286,7 +287,7 @@ impl Tool for ReadFileTool { Using the line numbers in this outline, you can call this tool again while specifying the start_line and end_line fields to see the implementations of symbols in the outline. - + Alternatively, you can fall back to the `grep` tool (if available) to search the file for specific content." } diff --git a/crates/assistant_tools/src/schema.rs b/crates/assistant_tools/src/schema.rs index 10a8bf0acd99131d2c0a80411072f312c9a42f50..dab7384efd8ba23669db645c87dcf79e95538d3a 100644 --- a/crates/assistant_tools/src/schema.rs +++ b/crates/assistant_tools/src/schema.rs @@ -43,12 +43,11 @@ impl Transform for ToJsonSchemaSubsetTransform { fn transform(&mut self, schema: &mut Schema) { // Ensure that the type field is not an array, this happens when we use // Option, the type will be [T, "null"]. 
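A minimal illustration of the transform this comment describes (rewritten as a let chain just below): schemars emits "type": [T, "null"] for Option<T>, and only the first entry is kept, presumably because the target schema subset rejects array types. serde_json only:

use serde_json::{Value, json};

// Collapse `"type": [T, "null"]` down to the first listed type.
fn flatten_type_field(schema: &mut Value) {
    if let Some(type_field) = schema.get_mut("type") {
        let first = type_field
            .as_array()
            .and_then(|types| types.first().cloned());
        if let Some(first_type) = first {
            *type_field = first_type;
        }
    }
}

fn main() {
    let mut schema = json!({ "type": ["string", "null"], "description": "optional name" });
    flatten_type_field(&mut schema);
    assert_eq!(
        schema,
        json!({ "type": "string", "description": "optional name" })
    );
}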
- if let Some(type_field) = schema.get_mut("type") { - if let Some(types) = type_field.as_array() { - if let Some(first_type) = types.first() { - *type_field = first_type.clone(); - } - } + if let Some(type_field) = schema.get_mut("type") + && let Some(types) = type_field.as_array() + && let Some(first_type) = types.first() + { + *type_field = first_type.clone(); } // oneOf is not supported, use anyOf instead diff --git a/crates/assistant_tools/src/terminal_tool.rs b/crates/assistant_tools/src/terminal_tool.rs index 58833c520848aa3bb345db0af7cccf70429784ea..b28e55e78aef40554a8ebe60108bd81da3f9d95a 100644 --- a/crates/assistant_tools/src/terminal_tool.rs +++ b/crates/assistant_tools/src/terminal_tool.rs @@ -2,9 +2,10 @@ use crate::{ schema::json_schema_for, ui::{COLLAPSED_LINES, ToolOutputPreview}, }; +use action_log::ActionLog; use agent_settings; use anyhow::{Context as _, Result, anyhow}; -use assistant_tool::{ActionLog, Tool, ToolCard, ToolResult, ToolUseStatus}; +use assistant_tool::{Tool, ToolCard, ToolResult, ToolUseStatus}; use futures::{FutureExt as _, future::Shared}; use gpui::{ Animation, AnimationExt, AnyWindowHandle, App, AppContext, Empty, Entity, EntityId, Task, @@ -58,12 +59,9 @@ impl TerminalTool { } if which::which("bash").is_ok() { - log::info!("agent selected bash for terminal tool"); "bash".into() } else { - let shell = get_system_shell(); - log::info!("agent selected {shell} for terminal tool"); - shell + get_system_shell() } }); Self { @@ -104,7 +102,7 @@ impl Tool for TerminalTool { let first_line = lines.next().unwrap_or_default(); let remaining_line_count = lines.count(); match remaining_line_count { - 0 => MarkdownInlineCode(&first_line).to_string(), + 0 => MarkdownInlineCode(first_line).to_string(), 1 => MarkdownInlineCode(&format!( "{} - {} more line", first_line, remaining_line_count @@ -215,7 +213,8 @@ impl Tool for TerminalTool { async move |cx| { let program = program.await; let env = env.await; - let terminal = project + + project .update(cx, |project, cx| { project.create_terminal( TerminalKind::Task(task::SpawnInTerminal { @@ -225,12 +224,10 @@ impl Tool for TerminalTool { env, ..Default::default() }), - window, cx, ) })? - .await; - terminal + .await } }); @@ -353,7 +350,7 @@ fn process_content( if is_empty { "Command executed successfully.".to_string() } else { - content.to_string() + content } } Some(exit_status) => { @@ -387,7 +384,7 @@ fn working_dir( let project = project.read(cx); let cd = &input.cd; - if cd == "." || cd == "" { + if cd == "." || cd.is_empty() { // Accept "." or "" as meaning "the one worktree" if we only have one worktree. 
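A compact sketch of the cd rule stated in this comment, with plain string roots standing in for the project's worktrees; the real code continued below also resolves paths inside a worktree, which this skips:

// Hypothetical helper; the real working_dir() operates on Project worktrees.
fn resolve_cd(cd: &str, worktree_roots: &[&str]) -> Result<String, String> {
    if cd == "." || cd.is_empty() {
        // "." or "" is only unambiguous when the project has exactly one worktree.
        if let [only] = worktree_roots {
            return Ok((*only).to_string());
        }
        return Err("`cd` of \".\" requires exactly one worktree".to_string());
    }
    // Otherwise `cd` must name one of the worktree roots.
    worktree_roots
        .iter()
        .find(|root| root.rsplit('/').next() == Some(cd))
        .map(|root| (*root).to_string())
        .ok_or_else(|| format!("`cd` directory {cd:?} was not in any of the project's worktrees."))
}

fn main() {
    assert_eq!(resolve_cd(".", &["/repo"]), Ok("/repo".to_string()));
    assert_eq!(resolve_cd("b", &["/a", "/b"]), Ok("/b".to_string()));
    assert!(resolve_cd("", &["/a", "/b"]).is_err());
    assert!(resolve_cd("missing", &["/repo"]).is_err());
}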
let mut worktrees = project.worktrees(cx); @@ -412,10 +409,8 @@ fn working_dir( { return Ok(Some(input_path.into())); } - } else { - if let Some(worktree) = project.worktree_for_root_name(cd, cx) { - return Ok(Some(worktree.read(cx).abs_path().to_path_buf())); - } + } else if let Some(worktree) = project.worktree_for_root_name(cd, cx) { + return Ok(Some(worktree.read(cx).abs_path().to_path_buf())); } anyhow::bail!("`cd` directory {cd:?} was not in any of the project's worktrees."); diff --git a/crates/assistant_tools/src/thinking_tool.rs b/crates/assistant_tools/src/thinking_tool.rs index 76c6e6c0bad745609386421ea7d720f9d6fefa07..17ce4afc2eeeff8c6f37834cd9e8c4ff71e7cd70 100644 --- a/crates/assistant_tools/src/thinking_tool.rs +++ b/crates/assistant_tools/src/thinking_tool.rs @@ -1,8 +1,9 @@ use std::sync::Arc; use crate::schema::json_schema_for; +use action_log::ActionLog; use anyhow::{Result, anyhow}; -use assistant_tool::{ActionLog, Tool, ToolResult}; +use assistant_tool::{Tool, ToolResult}; use gpui::{AnyWindowHandle, App, Entity, Task}; use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; use project::Project; diff --git a/crates/assistant_tools/src/ui/tool_call_card_header.rs b/crates/assistant_tools/src/ui/tool_call_card_header.rs index b71453373feb84d91168576a5bc7c22f8d883aa9..b41f19432f99685cf745f684228169b53939fffb 100644 --- a/crates/assistant_tools/src/ui/tool_call_card_header.rs +++ b/crates/assistant_tools/src/ui/tool_call_card_header.rs @@ -101,14 +101,11 @@ impl RenderOnce for ToolCallCardHeader { }) .when_some(secondary_text, |this, secondary_text| { this.child(bullet_divider()) - .child(div().text_size(font_size).child(secondary_text.clone())) + .child(div().text_size(font_size).child(secondary_text)) }) .when_some(code_path, |this, code_path| { - this.child(bullet_divider()).child( - Label::new(code_path.clone()) - .size(LabelSize::Small) - .inline_code(cx), - ) + this.child(bullet_divider()) + .child(Label::new(code_path).size(LabelSize::Small).inline_code(cx)) }) .with_animation( "loading-label", diff --git a/crates/assistant_tools/src/web_search_tool.rs b/crates/assistant_tools/src/web_search_tool.rs index d4a12f22c56796c5dfbb2f1935e2ad7646ce7c5a..dbcca0a1f6f2d5f679fd240a5bfe64c6c9705256 100644 --- a/crates/assistant_tools/src/web_search_tool.rs +++ b/crates/assistant_tools/src/web_search_tool.rs @@ -2,9 +2,10 @@ use std::{sync::Arc, time::Duration}; use crate::schema::json_schema_for; use crate::ui::ToolCallCardHeader; +use action_log::ActionLog; use anyhow::{Context as _, Result, anyhow}; use assistant_tool::{ - ActionLog, Tool, ToolCard, ToolResult, ToolResultContent, ToolResultOutput, ToolUseStatus, + Tool, ToolCard, ToolResult, ToolResultContent, ToolResultOutput, ToolUseStatus, }; use cloud_llm_client::{WebSearchResponse, WebSearchResult}; use futures::{Future, FutureExt, TryFutureExt}; @@ -45,7 +46,7 @@ impl Tool for WebSearchTool { } fn icon(&self) -> IconName { - IconName::Globe + IconName::ToolWeb } fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result { @@ -177,7 +178,7 @@ impl ToolCard for WebSearchToolCard { .label_size(LabelSize::Small) .color(Color::Muted) .icon(IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) + .icon_size(IconSize::Small) .icon_position(IconPosition::End) .truncate(true) .tooltip({ @@ -192,10 +193,7 @@ impl ToolCard for WebSearchToolCard { ) } }) - .on_click({ - let url = url.clone(); - move |_, _, cx| cx.open_url(&url) - }) + .on_click(move |_, _, cx| cx.open_url(&url)) })) 
                 .into_any(),
         ),
diff --git a/crates/audio/Cargo.toml b/crates/audio/Cargo.toml
index d857a3eb2f6c112b9d6a9851715718047f72ccbf..ae7eb52fd377b315151525e7d501c4a33454476f 100644
--- a/crates/audio/Cargo.toml
+++ b/crates/audio/Cargo.toml
@@ -15,9 +15,10 @@ doctest = false
 [dependencies]
 anyhow.workspace = true
 collections.workspace = true
-derive_more.workspace = true
 gpui.workspace = true
-parking_lot.workspace = true
-rodio = { version = "0.21.1", default-features = false, features = ["wav", "playback", "tracing"] }
+settings.workspace = true
+schemars.workspace = true
+serde.workspace = true
+rodio = { workspace = true, features = [ "wav", "playback", "tracing" ] }
 util.workspace = true
 workspace-hack.workspace = true
diff --git a/crates/audio/src/assets.rs b/crates/audio/src/assets.rs
deleted file mode 100644
index fd5c935d875960f4fd9bf30494301f4811b22448..0000000000000000000000000000000000000000
--- a/crates/audio/src/assets.rs
+++ /dev/null
@@ -1,54 +0,0 @@
-use std::{io::Cursor, sync::Arc};
-
-use anyhow::{Context as _, Result};
-use collections::HashMap;
-use gpui::{App, AssetSource, Global};
-use rodio::{Decoder, Source, source::Buffered};
-
-type Sound = Buffered<Decoder<Cursor<Vec<u8>>>>;
-
-pub struct SoundRegistry {
-    cache: Arc<parking_lot::Mutex<HashMap<String, Sound>>>,
-    assets: Box<dyn AssetSource>,
-}
-
-struct GlobalSoundRegistry(Arc<SoundRegistry>);
-
-impl Global for GlobalSoundRegistry {}
-
-impl SoundRegistry {
-    pub fn new(source: impl AssetSource) -> Arc<Self> {
-        Arc::new(Self {
-            cache: Default::default(),
-            assets: Box::new(source),
-        })
-    }
-
-    pub fn global(cx: &App) -> Arc<Self> {
-        cx.global::<GlobalSoundRegistry>().0.clone()
-    }
-
-    pub(crate) fn set_global(source: impl AssetSource, cx: &mut App) {
-        cx.set_global(GlobalSoundRegistry(SoundRegistry::new(source)));
-    }
-
-    pub fn get(&self, name: &str) -> Result<impl Source<Item = f32> + use<>> {
-        if let Some(wav) = self.cache.lock().get(name) {
-            return Ok(wav.clone());
-        }
-
-        let path = format!("sounds/{}.wav", name);
-        let bytes = self
-            .assets
-            .load(&path)?
-            .map(anyhow::Ok)
-            .with_context(|| format!("No asset available for path {path}"))??
-            .into_owned();
-        let cursor = Cursor::new(bytes);
-        let source = Decoder::new(cursor)?.buffered();
-
-        self.cache.lock().insert(name.to_string(), source.clone());
-
-        Ok(source)
-    }
-}
diff --git a/crates/audio/src/audio.rs b/crates/audio/src/audio.rs
index 44baa16aa20a3e4b7651744974cfc085dcde7fb1..b4f2c24fef119318b7b499c9b1a8501171f9084a 100644
--- a/crates/audio/src/audio.rs
+++ b/crates/audio/src/audio.rs
@@ -1,16 +1,19 @@
-use assets::SoundRegistry;
-use derive_more::{Deref, DerefMut};
-use gpui::{App, AssetSource, BorrowAppContext, Global};
-use rodio::{OutputStream, OutputStreamBuilder};
+use anyhow::{Context as _, Result, anyhow};
+use collections::HashMap;
+use gpui::{App, BorrowAppContext, Global};
+use rodio::{Decoder, OutputStream, OutputStreamBuilder, Source, source::Buffered};
+use settings::Settings;
+use std::io::Cursor;
 use util::ResultExt;
 
-mod assets;
+mod audio_settings;
+pub use audio_settings::AudioSettings;
 
-pub fn init(source: impl AssetSource, cx: &mut App) {
-    SoundRegistry::set_global(source, cx);
-    cx.set_global(GlobalAudio(Audio::new()));
+pub fn init(cx: &mut App) {
+    AudioSettings::register(cx);
 }
 
+#[derive(Copy, Clone, Eq, Hash, PartialEq)]
 pub enum Sound {
     Joined,
     Leave,
@@ -38,18 +41,12 @@ impl Sound {
 #[derive(Default)]
 pub struct Audio {
     output_handle: Option<OutputStream>,
+    source_cache: HashMap<Sound, Buffered<Decoder<Cursor<Vec<u8>>>>>,
 }
 
-#[derive(Deref, DerefMut)]
-struct GlobalAudio(Audio);
-
-impl Global for GlobalAudio {}
+impl Global for Audio {}
 
 impl Audio {
-    pub fn new() -> Self {
-        Self::default()
-    }
-
     fn ensure_output_exists(&mut self) -> Option<&OutputStream> {
         if self.output_handle.is_none() {
             self.output_handle = OutputStreamBuilder::open_default_stream().log_err();
@@ -58,26 +55,51 @@ impl Audio {
         self.output_handle.as_ref()
     }
 
-    pub fn play_sound(sound: Sound, cx: &mut App) {
-        if !cx.has_global::<GlobalAudio>() {
-            return;
-        }
+    pub fn play_source(
+        source: impl rodio::Source + Send + 'static,
+        cx: &mut App,
+    ) -> anyhow::Result<()> {
+        cx.update_default_global(|this: &mut Self, _cx| {
+            let output_handle = this
+                .ensure_output_exists()
+                .ok_or_else(|| anyhow!("Could not open audio output"))?;
+            output_handle.mixer().add(source);
+            Ok(())
+        })
+    }
 
-        cx.update_global::<GlobalAudio, _>(|this, cx| {
+    pub fn play_sound(sound: Sound, cx: &mut App) {
+        cx.update_default_global(|this: &mut Self, cx| {
+            let source = this.sound_source(sound, cx).log_err()?;
             let output_handle = this.ensure_output_exists()?;
-            let source = SoundRegistry::global(cx).get(sound.file()).log_err()?;
             output_handle.mixer().add(source);
             Some(())
         });
     }
 
     pub fn end_call(cx: &mut App) {
-        if !cx.has_global::<GlobalAudio>() {
-            return;
-        }
-
-        cx.update_global::<GlobalAudio, _>(|this, _| {
+        cx.update_default_global(|this: &mut Self, _cx| {
             this.output_handle.take();
         });
     }
+
+    fn sound_source(&mut self, sound: Sound, cx: &App) -> Result<impl Source + use<>> {
+        if let Some(wav) = self.source_cache.get(&sound) {
+            return Ok(wav.clone());
+        }
+
+        let path = format!("sounds/{}.wav", sound.file());
+        let bytes = cx
+            .asset_source()
+            .load(&path)?
+            .map(anyhow::Ok)
+            .with_context(|| format!("No asset available for path {path}"))??
+ .into_owned(); + let cursor = Cursor::new(bytes); + let source = Decoder::new(cursor)?.buffered(); + + self.source_cache.insert(sound, source.clone()); + + Ok(source) + } } diff --git a/crates/audio/src/audio_settings.rs b/crates/audio/src/audio_settings.rs new file mode 100644 index 0000000000000000000000000000000000000000..807179881c7c3b27aad2e3142a84c730951eb709 --- /dev/null +++ b/crates/audio/src/audio_settings.rs @@ -0,0 +1,33 @@ +use anyhow::Result; +use gpui::App; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use settings::{Settings, SettingsSources}; + +#[derive(Deserialize, Debug)] +pub struct AudioSettings { + /// Opt into the new audio system. + #[serde(rename = "experimental.rodio_audio", default)] + pub rodio_audio: bool, // default is false +} + +/// Configuration of audio in Zed. +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +#[serde(default)] +pub struct AudioSettingsContent { + /// Whether to use the experimental audio system + #[serde(rename = "experimental.rodio_audio", default)] + pub rodio_audio: bool, +} + +impl Settings for AudioSettings { + const KEY: Option<&'static str> = Some("audio"); + + type FileContent = AudioSettingsContent; + + fn load(sources: SettingsSources, _cx: &mut App) -> Result { + sources.json_merge() + } + + fn import_from_vscode(_vscode: &settings::VsCodeSettings, _current: &mut Self::FileContent) {} +} diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index 074aaa6fea7033134bd0cc35d87bc0951e25b663..2150873cadd0a84b4a2894ebbe373d9bd0e007f0 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -59,16 +59,9 @@ pub enum VersionCheckType { pub enum AutoUpdateStatus { Idle, Checking, - Downloading { - version: VersionCheckType, - }, - Installing { - version: VersionCheckType, - }, - Updated { - binary_path: PathBuf, - version: VersionCheckType, - }, + Downloading { version: VersionCheckType }, + Installing { version: VersionCheckType }, + Updated { version: VersionCheckType }, Errored, } @@ -83,6 +76,7 @@ pub struct AutoUpdater { current_version: SemanticVersion, http_client: Arc, pending_poll: Option>>, + quit_subscription: Option, } #[derive(Deserialize, Clone, Debug)] @@ -164,7 +158,7 @@ pub fn init(http_client: Arc, cx: &mut App) { AutoUpdateSetting::register(cx); cx.observe_new(|workspace: &mut Workspace, _window, _cx| { - workspace.register_action(|_, action: &Check, window, cx| check(action, window, cx)); + workspace.register_action(|_, action, window, cx| check(action, window, cx)); workspace.register_action(|_, action, _, cx| { view_release_notes(action, cx); @@ -174,7 +168,7 @@ pub fn init(http_client: Arc, cx: &mut App) { let version = release_channel::AppVersion::global(cx); let auto_updater = cx.new(|cx| { - let updater = AutoUpdater::new(version, http_client); + let updater = AutoUpdater::new(version, http_client, cx); let poll_for_updates = ReleaseChannel::try_global(cx) .map(|channel| channel.poll_for_updates()) @@ -321,12 +315,34 @@ impl AutoUpdater { cx.default_global::().0.clone() } - fn new(current_version: SemanticVersion, http_client: Arc) -> Self { + fn new( + current_version: SemanticVersion, + http_client: Arc, + cx: &mut Context, + ) -> Self { + // On windows, executable files cannot be overwritten while they are + // running, so we must wait to overwrite the application until quitting + // or restarting. 
When quitting the app, we spawn the auto update helper + // to finish the auto update process after Zed exits. When restarting + // the app after an update, we use `set_restart_path` to run the auto + // update helper instead of the app, so that it can overwrite the app + // and then spawn the new binary. + let quit_subscription = Some(cx.on_app_quit(|_, _| async move { + #[cfg(target_os = "windows")] + finalize_auto_update_on_quit(); + })); + + cx.on_app_restart(|this, _| { + this.quit_subscription.take(); + }) + .detach(); + Self { status: AutoUpdateStatus::Idle, current_version, http_client, pending_poll: None, + quit_subscription, } } @@ -527,7 +543,7 @@ impl AutoUpdater { async fn update(this: Entity, mut cx: AsyncApp) -> Result<()> { let (client, installed_version, previous_status, release_channel) = - this.read_with(&mut cx, |this, cx| { + this.read_with(&cx, |this, cx| { ( this.http_client.clone(), this.current_version, @@ -536,6 +552,8 @@ impl AutoUpdater { ) })?; + Self::check_dependencies()?; + this.update(&mut cx, |this, cx| { this.status = AutoUpdateStatus::Checking; cx.notify(); @@ -582,13 +600,15 @@ impl AutoUpdater { cx.notify(); })?; - let binary_path = Self::binary_path(installer_dir, target_path, &cx).await?; + let new_binary_path = Self::install_release(installer_dir, target_path, &cx).await?; + if let Some(new_binary_path) = new_binary_path { + cx.update(|cx| cx.set_restart_path(new_binary_path))?; + } this.update(&mut cx, |this, cx| { this.set_should_show_update_notification(true, cx) .detach_and_log_err(cx); this.status = AutoUpdateStatus::Updated { - binary_path, version: newer_version, }; cx.notify(); @@ -639,6 +659,15 @@ impl AutoUpdater { } } + fn check_dependencies() -> Result<()> { + #[cfg(not(target_os = "windows"))] + anyhow::ensure!( + which::which("rsync").is_ok(), + "Aborting. Could not find rsync which is required for auto-updates." + ); + Ok(()) + } + async fn target_path(installer_dir: &InstallerDir) -> Result { let filename = match OS { "macos" => anyhow::Ok("Zed.dmg"), @@ -647,20 +676,14 @@ impl AutoUpdater { unsupported_os => anyhow::bail!("not supported: {unsupported_os}"), }?; - #[cfg(not(target_os = "windows"))] - anyhow::ensure!( - which::which("rsync").is_ok(), - "Aborting. Could not find rsync which is required for auto-updates." 
- ); - Ok(installer_dir.path().join(filename)) } - async fn binary_path( + async fn install_release( installer_dir: InstallerDir, target_path: PathBuf, cx: &AsyncApp, - ) -> Result { + ) -> Result> { match OS { "macos" => install_release_macos(&installer_dir, target_path, cx).await, "linux" => install_release_linux(&installer_dir, target_path, cx).await, @@ -801,7 +824,7 @@ async fn install_release_linux( temp_dir: &InstallerDir, downloaded_tar_gz: PathBuf, cx: &AsyncApp, -) -> Result { +) -> Result> { let channel = cx.update(|cx| ReleaseChannel::global(cx).dev_name())?; let home_dir = PathBuf::from(env::var("HOME").context("no HOME env var set")?); let running_app_path = cx.update(|cx| cx.app_path())??; @@ -861,14 +884,14 @@ async fn install_release_linux( String::from_utf8_lossy(&output.stderr) ); - Ok(to.join(expected_suffix)) + Ok(Some(to.join(expected_suffix))) } async fn install_release_macos( temp_dir: &InstallerDir, downloaded_dmg: PathBuf, cx: &AsyncApp, -) -> Result { +) -> Result> { let running_app_path = cx.update(|cx| cx.app_path())??; let running_app_filename = running_app_path .file_name() @@ -910,10 +933,10 @@ async fn install_release_macos( String::from_utf8_lossy(&output.stderr) ); - Ok(running_app_path) + Ok(None) } -async fn install_release_windows(downloaded_installer: PathBuf) -> Result { +async fn install_release_windows(downloaded_installer: PathBuf) -> Result> { let output = Command::new(downloaded_installer) .arg("/verysilent") .arg("/update=true") @@ -926,29 +949,36 @@ async fn install_release_windows(downloaded_installer: PathBuf) -> Result bool { +pub fn finalize_auto_update_on_quit() { let Some(installer_path) = std::env::current_exe() .ok() .and_then(|p| p.parent().map(|p| p.join("updates"))) else { - return false; + return; }; // The installer will create a flag file after it finishes updating let flag_file = installer_path.join("versions.txt"); - if flag_file.exists() { - if let Some(helper) = installer_path + if flag_file.exists() + && let Some(helper) = installer_path .parent() .map(|p| p.join("tools\\auto_update_helper.exe")) - { - let _ = std::process::Command::new(helper).spawn(); - return true; - } + { + let mut command = std::process::Command::new(helper); + command.arg("--launch"); + command.arg("false"); + let _ = command.spawn(); } - false } #[cfg(test)] @@ -1002,7 +1032,6 @@ mod tests { let app_commit_sha = Ok(Some("a".to_string())); let installed_version = SemanticVersion::new(1, 0, 0); let status = AutoUpdateStatus::Updated { - binary_path: PathBuf::new(), version: VersionCheckType::Semantic(SemanticVersion::new(1, 0, 1)), }; let fetched_version = SemanticVersion::new(1, 0, 1); @@ -1024,7 +1053,6 @@ mod tests { let app_commit_sha = Ok(Some("a".to_string())); let installed_version = SemanticVersion::new(1, 0, 0); let status = AutoUpdateStatus::Updated { - binary_path: PathBuf::new(), version: VersionCheckType::Semantic(SemanticVersion::new(1, 0, 1)), }; let fetched_version = SemanticVersion::new(1, 0, 2); @@ -1090,7 +1118,6 @@ mod tests { let app_commit_sha = Ok(Some("a".to_string())); let installed_version = SemanticVersion::new(1, 0, 0); let status = AutoUpdateStatus::Updated { - binary_path: PathBuf::new(), version: VersionCheckType::Sha(AppCommitSha::new("b".to_string())), }; let fetched_sha = "b".to_string(); @@ -1112,7 +1139,6 @@ mod tests { let app_commit_sha = Ok(Some("a".to_string())); let installed_version = SemanticVersion::new(1, 0, 0); let status = AutoUpdateStatus::Updated { - binary_path: PathBuf::new(), version: 
VersionCheckType::Sha(AppCommitSha::new("b".to_string())), }; let fetched_sha = "c".to_string(); @@ -1160,7 +1186,6 @@ mod tests { let app_commit_sha = Ok(None); let installed_version = SemanticVersion::new(1, 0, 0); let status = AutoUpdateStatus::Updated { - binary_path: PathBuf::new(), version: VersionCheckType::Sha(AppCommitSha::new("b".to_string())), }; let fetched_sha = "b".to_string(); @@ -1183,7 +1208,6 @@ mod tests { let app_commit_sha = Ok(None); let installed_version = SemanticVersion::new(1, 0, 0); let status = AutoUpdateStatus::Updated { - binary_path: PathBuf::new(), version: VersionCheckType::Sha(AppCommitSha::new("b".to_string())), }; let fetched_sha = "c".to_string(); diff --git a/crates/auto_update_helper/src/auto_update_helper.rs b/crates/auto_update_helper/src/auto_update_helper.rs index 7c810d87245e4cd69e8f36aad3282ff1d8978cf6..21ead701b2629960a9f2b5bc639f5d6dcdbc96c5 100644 --- a/crates/auto_update_helper/src/auto_update_helper.rs +++ b/crates/auto_update_helper/src/auto_update_helper.rs @@ -18,7 +18,7 @@ fn main() {} #[cfg(target_os = "windows")] mod windows_impl { - use std::path::Path; + use std::{borrow::Cow, path::Path}; use super::dialog::create_dialog_window; use super::updater::perform_update; @@ -37,6 +37,11 @@ mod windows_impl { pub(crate) const WM_JOB_UPDATED: u32 = WM_USER + 1; pub(crate) const WM_TERMINATE: u32 = WM_USER + 2; + #[derive(Debug, Default)] + struct Args { + launch: bool, + } + pub(crate) fn run() -> Result<()> { let helper_dir = std::env::current_exe()? .parent() @@ -51,8 +56,9 @@ mod windows_impl { log::info!("======= Starting Zed update ======="); let (tx, rx) = std::sync::mpsc::channel(); let hwnd = create_dialog_window(rx)?.0 as isize; + let args = parse_args(std::env::args().skip(1)); std::thread::spawn(move || { - let result = perform_update(app_dir.as_path(), Some(hwnd)); + let result = perform_update(app_dir.as_path(), Some(hwnd), args.launch); tx.send(result).ok(); unsafe { PostMessageW(Some(HWND(hwnd as _)), WM_TERMINATE, WPARAM(0), LPARAM(0)) }.ok(); }); @@ -77,6 +83,29 @@ mod windows_impl { Ok(()) } + fn parse_args(input: impl IntoIterator) -> Args { + let mut args: Args = Args { launch: true }; + + let mut input = input.into_iter(); + if let Some(arg) = input.next() { + let launch_arg; + + if arg == "--launch" { + launch_arg = input.next().map(Cow::Owned); + } else if let Some(rest) = arg.strip_prefix("--launch=") { + launch_arg = Some(Cow::Borrowed(rest)); + } else { + launch_arg = None; + } + + if launch_arg.as_deref() == Some("false") { + args.launch = false; + } + } + + args + } + pub(crate) fn show_error(mut content: String) { if content.len() > 600 { content.truncate(600); @@ -91,4 +120,28 @@ mod windows_impl { ) }; } + + #[cfg(test)] + mod tests { + use crate::windows_impl::parse_args; + + #[test] + fn test_parse_args() { + // launch can be specified via two separate arguments + assert!(parse_args(["--launch".into(), "true".into()]).launch); + assert!(!parse_args(["--launch".into(), "false".into()]).launch); + + // launch can be specified via one single argument + assert!(parse_args(["--launch=true".into()]).launch); + assert!(!parse_args(["--launch=false".into()]).launch); + + // launch defaults to true on no arguments + assert!(parse_args([]).launch); + + // launch defaults to true on invalid arguments + assert!(parse_args(["--launch".into()]).launch); + assert!(parse_args(["--launch=".into()]).launch); + assert!(parse_args(["--launch=invalid".into()]).launch); + } + } } diff --git 
a/crates/auto_update_helper/src/dialog.rs b/crates/auto_update_helper/src/dialog.rs index 010ebb487525027cdf49ceadc2bce6b6e01fff97..903ac34da227b2929705ff2af72db3770cff6532 100644 --- a/crates/auto_update_helper/src/dialog.rs +++ b/crates/auto_update_helper/src/dialog.rs @@ -72,7 +72,7 @@ pub(crate) fn create_dialog_window(receiver: Receiver>) -> Result { with_dialog_data(hwnd, |data| { - if let Ok(result) = data.borrow_mut().rx.recv() { - if let Err(e) = result { - log::error!("Failed to update Zed: {:?}", e); - show_error(format!("Error: {:?}", e)); - } + if let Ok(result) = data.borrow_mut().rx.recv() + && let Err(e) = result + { + log::error!("Failed to update Zed: {:?}", e); + show_error(format!("Error: {:?}", e)); } }); unsafe { PostQuitMessage(0) }; diff --git a/crates/auto_update_helper/src/updater.rs b/crates/auto_update_helper/src/updater.rs index 9ec25cd9fd72d29c586f56a9aa100b5b0542cf80..762771617609e63996685d3d96fae69135355249 100644 --- a/crates/auto_update_helper/src/updater.rs +++ b/crates/auto_update_helper/src/updater.rs @@ -90,11 +90,7 @@ pub(crate) const JOBS: [Job; 2] = [ std::thread::sleep(Duration::from_millis(1000)); if let Ok(config) = std::env::var("ZED_AUTO_UPDATE") { match config.as_str() { - "err" => Err(std::io::Error::new( - std::io::ErrorKind::Other, - "Simulated error", - )) - .context("Anyhow!"), + "err" => Err(std::io::Error::other("Simulated error")).context("Anyhow!"), _ => panic!("Unknown ZED_AUTO_UPDATE value: {}", config), } } else { @@ -105,11 +101,7 @@ pub(crate) const JOBS: [Job; 2] = [ std::thread::sleep(Duration::from_millis(1000)); if let Ok(config) = std::env::var("ZED_AUTO_UPDATE") { match config.as_str() { - "err" => Err(std::io::Error::new( - std::io::ErrorKind::Other, - "Simulated error", - )) - .context("Anyhow!"), + "err" => Err(std::io::Error::other("Simulated error")).context("Anyhow!"), _ => panic!("Unknown ZED_AUTO_UPDATE value: {}", config), } } else { @@ -118,7 +110,7 @@ pub(crate) const JOBS: [Job; 2] = [ }, ]; -pub(crate) fn perform_update(app_dir: &Path, hwnd: Option) -> Result<()> { +pub(crate) fn perform_update(app_dir: &Path, hwnd: Option, launch: bool) -> Result<()> { let hwnd = hwnd.map(|ptr| HWND(ptr as _)); for job in JOBS.iter() { @@ -145,9 +137,11 @@ pub(crate) fn perform_update(app_dir: &Path, hwnd: Option) -> Result<()> } } } - let _ = std::process::Command::new(app_dir.join("Zed.exe")) - .creation_flags(CREATE_NEW_PROCESS_GROUP.0) - .spawn(); + if launch { + let _ = std::process::Command::new(app_dir.join("Zed.exe")) + .creation_flags(CREATE_NEW_PROCESS_GROUP.0) + .spawn(); + } log::info!("Update completed successfully"); Ok(()) } @@ -159,11 +153,11 @@ mod test { #[test] fn test_perform_update() { let app_dir = std::path::Path::new("C:/"); - assert!(perform_update(app_dir, None).is_ok()); + assert!(perform_update(app_dir, None, false).is_ok()); // Simulate a timeout unsafe { std::env::set_var("ZED_AUTO_UPDATE", "err") }; - let ret = perform_update(app_dir, None); + let ret = perform_update(app_dir, None, false); assert!(ret.is_err_and(|e| e.to_string().as_str() == "Timed out")); } } diff --git a/crates/auto_update_ui/src/auto_update_ui.rs b/crates/auto_update_ui/src/auto_update_ui.rs index 63baef1f7d178045a2a2b5c976ede9ad75adb646..7063dffd6d84a13f8aa4d08ca91a6ce85826417d 100644 --- a/crates/auto_update_ui/src/auto_update_ui.rs +++ b/crates/auto_update_ui/src/auto_update_ui.rs @@ -114,7 +114,7 @@ fn view_release_notes_locally( cx, ); workspace.add_item_to_active_pane( - Box::new(markdown_preview.clone()), + 
Box::new(markdown_preview), None, true, window, diff --git a/crates/bedrock/src/bedrock.rs b/crates/bedrock/src/bedrock.rs index 1c6a9bd0a1e745da1dd4577741fc7cb4cab771ad..c8315d4201a46d5ac47825ff40aed3829f191d87 100644 --- a/crates/bedrock/src/bedrock.rs +++ b/crates/bedrock/src/bedrock.rs @@ -54,11 +54,7 @@ pub async fn stream_completion( )]))); } - if request - .tools - .as_ref() - .map_or(false, |t| !t.tools.is_empty()) - { + if request.tools.as_ref().is_some_and(|t| !t.tools.is_empty()) { response = response.set_tool_config(request.tools); } diff --git a/crates/breadcrumbs/src/breadcrumbs.rs b/crates/breadcrumbs/src/breadcrumbs.rs index 8eed7497da0fea8cb0227b22885599b446e5aac0..a6b27476fe36b1143103e1acd035bda6cda15132 100644 --- a/crates/breadcrumbs/src/breadcrumbs.rs +++ b/crates/breadcrumbs/src/breadcrumbs.rs @@ -82,11 +82,12 @@ impl Render for Breadcrumbs { } text_style.color = Color::Muted.color(cx); - if index == 0 && !TabBarSettings::get_global(cx).show && active_item.is_dirty(cx) { - if let Some(styled_element) = apply_dirty_filename_style(&segment, &text_style, cx) - { - return styled_element; - } + if index == 0 + && !TabBarSettings::get_global(cx).show + && active_item.is_dirty(cx) + && let Some(styled_element) = apply_dirty_filename_style(&segment, &text_style, cx) + { + return styled_element; } StyledText::new(segment.text.replace('\n', "⏎")) @@ -231,7 +232,7 @@ fn apply_dirty_filename_style( let highlight = vec![(filename_position..text.len(), highlight_style)]; Some( StyledText::new(text) - .with_default_highlights(&text_style, highlight) + .with_default_highlights(text_style, highlight) .into_any(), ) } diff --git a/crates/buffer_diff/src/buffer_diff.rs b/crates/buffer_diff/src/buffer_diff.rs index df92754ed403256733056043b67f3fa27ce503ad..3821ad23d9bb64f631a04845eb8ff407d9b0f914 100644 --- a/crates/buffer_diff/src/buffer_diff.rs +++ b/crates/buffer_diff/src/buffer_diff.rs @@ -175,12 +175,8 @@ impl BufferDiffSnapshot { if let Some(text) = &base_text { let base_text_rope = Rope::from(text.as_str()); base_text_pair = Some((text.clone(), base_text_rope.clone())); - let snapshot = language::Buffer::build_snapshot( - base_text_rope, - language.clone(), - language_registry.clone(), - cx, - ); + let snapshot = + language::Buffer::build_snapshot(base_text_rope, language, language_registry, cx); base_text_snapshot = cx.background_spawn(snapshot); base_text_exists = true; } else { @@ -572,14 +568,14 @@ impl BufferDiffInner { pending_range.end.column = 0; } - if pending_range == (start_point..end_point) { - if !buffer.has_edits_since_in_range( + if pending_range == (start_point..end_point) + && !buffer.has_edits_since_in_range( &pending_hunk.buffer_version, start_anchor..end_anchor, - ) { - has_pending = true; - secondary_status = pending_hunk.new_status; - } + ) + { + has_pending = true; + secondary_status = pending_hunk.new_status; } } @@ -928,7 +924,7 @@ impl BufferDiff { let new_index_text = self.inner.stage_or_unstage_hunks_impl( &self.secondary_diff.as_ref()?.read(cx).inner, stage, - &hunks, + hunks, buffer, file_exists, ); @@ -952,12 +948,12 @@ impl BufferDiff { cx: &App, ) -> Option> { let start = self - .hunks_intersecting_range(range.clone(), &buffer, cx) + .hunks_intersecting_range(range.clone(), buffer, cx) .next()? .buffer_range .start; let end = self - .hunks_intersecting_range_rev(range.clone(), &buffer) + .hunks_intersecting_range_rev(range, buffer) .next()? 
.buffer_range .end; @@ -1031,21 +1027,20 @@ impl BufferDiff { && state.base_text.syntax_update_count() == new_state.base_text.syntax_update_count() => { - (false, new_state.compare(&state, buffer)) + (false, new_state.compare(state, buffer)) } _ => (true, Some(text::Anchor::MIN..text::Anchor::MAX)), }; - if let Some(secondary_changed_range) = secondary_diff_change { - if let Some(secondary_hunk_range) = - self.range_to_hunk_range(secondary_changed_range, &buffer, cx) - { - if let Some(range) = &mut changed_range { - range.start = secondary_hunk_range.start.min(&range.start, &buffer); - range.end = secondary_hunk_range.end.max(&range.end, &buffer); - } else { - changed_range = Some(secondary_hunk_range); - } + if let Some(secondary_changed_range) = secondary_diff_change + && let Some(secondary_hunk_range) = + self.range_to_hunk_range(secondary_changed_range, buffer, cx) + { + if let Some(range) = &mut changed_range { + range.start = secondary_hunk_range.start.min(&range.start, buffer); + range.end = secondary_hunk_range.end.max(&range.end, buffer); + } else { + changed_range = Some(secondary_hunk_range); } } @@ -1057,8 +1052,8 @@ impl BufferDiff { if let Some((first, last)) = state.pending_hunks.first().zip(state.pending_hunks.last()) { if let Some(range) = &mut changed_range { - range.start = range.start.min(&first.buffer_range.start, &buffer); - range.end = range.end.max(&last.buffer_range.end, &buffer); + range.start = range.start.min(&first.buffer_range.start, buffer); + range.end = range.end.max(&last.buffer_range.end, buffer); } else { changed_range = Some(first.buffer_range.start..last.buffer_range.end); } @@ -1442,7 +1437,7 @@ mod tests { .unindent(); let buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text); - let unstaged_diff = BufferDiffSnapshot::new_sync(buffer.clone(), index_text.clone(), cx); + let unstaged_diff = BufferDiffSnapshot::new_sync(buffer.clone(), index_text, cx); let mut uncommitted_diff = BufferDiffSnapshot::new_sync(buffer.clone(), head_text.clone(), cx); uncommitted_diff.secondary_diff = Some(Box::new(unstaged_diff)); @@ -1797,7 +1792,7 @@ mod tests { uncommitted_diff.update(cx, |diff, cx| { let hunks = diff - .hunks_intersecting_range(hunk_range.clone(), &buffer, &cx) + .hunks_intersecting_range(hunk_range.clone(), &buffer, cx) .collect::>(); for hunk in &hunks { assert_ne!( @@ -1812,7 +1807,7 @@ mod tests { .to_string(); let hunks = diff - .hunks_intersecting_range(hunk_range.clone(), &buffer, &cx) + .hunks_intersecting_range(hunk_range.clone(), &buffer, cx) .collect::>(); for hunk in &hunks { assert_eq!( @@ -1870,7 +1865,7 @@ mod tests { .to_string(); assert_eq!(new_index_text, buffer_text); - let hunk = diff.hunks(&buffer, &cx).next().unwrap(); + let hunk = diff.hunks(&buffer, cx).next().unwrap(); assert_eq!( hunk.secondary_status, DiffHunkSecondaryStatus::SecondaryHunkRemovalPending @@ -1882,7 +1877,7 @@ mod tests { .to_string(); assert_eq!(index_text, head_text); - let hunk = diff.hunks(&buffer, &cx).next().unwrap(); + let hunk = diff.hunks(&buffer, cx).next().unwrap(); // optimistically unstaged (fine, could also be HasSecondaryHunk) assert_eq!( hunk.secondary_status, @@ -2029,8 +2024,8 @@ mod tests { fn gen_working_copy(rng: &mut StdRng, head: &str) -> String { let mut old_lines = { let mut old_lines = Vec::new(); - let mut old_lines_iter = head.lines(); - while let Some(line) = old_lines_iter.next() { + let old_lines_iter = head.lines(); + for line in old_lines_iter { assert!(!line.ends_with("\n")); old_lines.push(line.to_owned()); } @@ 
-2134,7 +2129,7 @@ mod tests { diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &working_copy, cx) .collect::>() }); - if hunks.len() == 0 { + if hunks.is_empty() { return; } diff --git a/crates/call/src/call_impl/mod.rs b/crates/call/src/call_impl/mod.rs index 71c314932419e1228c74e2d3de547a4e21b152c6..156a80faba61d2a4946bafa5943c167284d14a97 100644 --- a/crates/call/src/call_impl/mod.rs +++ b/crates/call/src/call_impl/mod.rs @@ -116,7 +116,7 @@ impl ActiveCall { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let user_store = this.read_with(&mut cx, |this, _| this.user_store.clone())?; + let user_store = this.read_with(&cx, |this, _| this.user_store.clone())?; let call = IncomingCall { room_id: envelope.payload.room_id, participants: user_store @@ -147,7 +147,7 @@ impl ActiveCall { let mut incoming_call = this.incoming_call.0.borrow_mut(); if incoming_call .as_ref() - .map_or(false, |call| call.room_id == envelope.payload.room_id) + .is_some_and(|call| call.room_id == envelope.payload.room_id) { incoming_call.take(); } diff --git a/crates/call/src/call_impl/participant.rs b/crates/call/src/call_impl/participant.rs index 8e1e264a23d7c58c927d182bbac811a0beb4f02a..6fb6a2eb79b537aa9d7296a323f7d45221a4b05d 100644 --- a/crates/call/src/call_impl/participant.rs +++ b/crates/call/src/call_impl/participant.rs @@ -64,7 +64,7 @@ pub struct RemoteParticipant { impl RemoteParticipant { pub fn has_video_tracks(&self) -> bool { - return !self.video_tracks.is_empty(); + !self.video_tracks.is_empty() } pub fn can_write(&self) -> bool { diff --git a/crates/call/src/call_impl/room.rs b/crates/call/src/call_impl/room.rs index afeee4c924feb2990668f953d5b2f7dfcff26f34..ffe4c6c25191dc8f9087ccfcc77252b8e5a25a13 100644 --- a/crates/call/src/call_impl/room.rs +++ b/crates/call/src/call_impl/room.rs @@ -10,10 +10,10 @@ use client::{ }; use collections::{BTreeMap, HashMap, HashSet}; use fs::Fs; -use futures::{FutureExt, StreamExt}; +use futures::StreamExt; use gpui::{ - App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, ScreenCaptureSource, - ScreenCaptureStream, Task, WeakEntity, + App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, FutureExt as _, + ScreenCaptureSource, ScreenCaptureStream, Task, Timeout, WeakEntity, }; use gpui_tokio::Tokio; use language::LanguageRegistry; @@ -370,57 +370,53 @@ impl Room { })?; // Wait for client to re-establish a connection to the server. 
- { - let mut reconnection_timeout = - cx.background_executor().timer(RECONNECT_TIMEOUT).fuse(); - let client_reconnection = async { - let mut remaining_attempts = 3; - while remaining_attempts > 0 { - if client_status.borrow().is_connected() { - log::info!("client reconnected, attempting to rejoin room"); - - let Some(this) = this.upgrade() else { break }; - match this.update(cx, |this, cx| this.rejoin(cx)) { - Ok(task) => { - if task.await.log_err().is_some() { - return true; - } else { - remaining_attempts -= 1; - } + let executor = cx.background_executor().clone(); + let client_reconnection = async { + let mut remaining_attempts = 3; + while remaining_attempts > 0 { + if client_status.borrow().is_connected() { + log::info!("client reconnected, attempting to rejoin room"); + + let Some(this) = this.upgrade() else { break }; + match this.update(cx, |this, cx| this.rejoin(cx)) { + Ok(task) => { + if task.await.log_err().is_some() { + return true; + } else { + remaining_attempts -= 1; } - Err(_app_dropped) => return false, } - } else if client_status.borrow().is_signed_out() { - return false; + Err(_app_dropped) => return false, } - - log::info!( - "waiting for client status change, remaining attempts {}", - remaining_attempts - ); - client_status.next().await; + } else if client_status.borrow().is_signed_out() { + return false; } - false + + log::info!( + "waiting for client status change, remaining attempts {}", + remaining_attempts + ); + client_status.next().await; } - .fuse(); - futures::pin_mut!(client_reconnection); - - futures::select_biased! { - reconnected = client_reconnection => { - if reconnected { - log::info!("successfully reconnected to room"); - // If we successfully joined the room, go back around the loop - // waiting for future connection status changes. - continue; - } - } - _ = reconnection_timeout => { - log::info!("room reconnection timeout expired"); - } + false + }; + + match client_reconnection + .with_timeout(RECONNECT_TIMEOUT, &executor) + .await + { + Ok(true) => { + log::info!("successfully reconnected to room"); + // If we successfully joined the room, go back around the loop + // waiting for future connection status changes. 
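The hunk above trades the hand-rolled `select_biased!`/timer pair for gpui's `with_timeout` helper, which resolves to `Err(Timeout)` when the reconnection future does not finish in time. A rough standalone approximation of the same shape, using smol's timer and the `or` race combinator instead of gpui (names and durations here are invented):

```rust
use std::time::Duration;

// Stand-in for the rejoin loop; the real code retries while watching client status.
async fn reconnect() -> bool {
    smol::Timer::after(Duration::from_millis(50)).await;
    true
}

fn main() {
    // Race the work against a timer; whichever finishes first wins.
    let outcome: Result<bool, ()> = smol::block_on(smol::future::or(
        async { Ok(reconnect().await) },
        async {
            smol::Timer::after(Duration::from_secs(5)).await;
            Err(())
        },
    ));

    match outcome {
        Ok(true) => println!("successfully reconnected to room"),
        Ok(false) => println!("gave up after repeated attempts"),
        Err(()) => println!("room reconnection timeout expired"),
    }
}
```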
+ continue; + } + Ok(false) => break, + Err(Timeout) => { + log::info!("room reconnection timeout expired"); + break; } } - - break; } } @@ -831,24 +827,23 @@ impl Room { ); Audio::play_sound(Sound::Joined, cx); - if let Some(livekit_participants) = &livekit_participants { - if let Some(livekit_participant) = livekit_participants + if let Some(livekit_participants) = &livekit_participants + && let Some(livekit_participant) = livekit_participants .get(&ParticipantIdentity(user.id.to_string())) + { + for publication in + livekit_participant.track_publications().into_values() { - for publication in - livekit_participant.track_publications().into_values() - { - if let Some(track) = publication.track() { - this.livekit_room_updated( - RoomEvent::TrackSubscribed { - track, - publication, - participant: livekit_participant.clone(), - }, - cx, - ) - .warn_on_err(); - } + if let Some(track) = publication.track() { + this.livekit_room_updated( + RoomEvent::TrackSubscribed { + track, + publication, + participant: livekit_participant.clone(), + }, + cx, + ) + .warn_on_err(); } } } @@ -944,10 +939,8 @@ impl Room { self.client.user_id() ) })?; - if self.live_kit.as_ref().map_or(true, |kit| kit.deafened) { - if publication.is_audio() { - publication.set_enabled(false, cx); - } + if self.live_kit.as_ref().is_none_or(|kit| kit.deafened) && publication.is_audio() { + publication.set_enabled(false, cx); } match track { livekit_client::RemoteTrack::Audio(track) => { @@ -1009,10 +1002,10 @@ impl Room { for (sid, participant) in &mut self.remote_participants { participant.speaking = speaker_ids.binary_search(sid).is_ok(); } - if let Some(id) = self.client.user_id() { - if let Some(room) = &mut self.live_kit { - room.speaking = speaker_ids.binary_search(&id).is_ok(); - } + if let Some(id) = self.client.user_id() + && let Some(room) = &mut self.live_kit + { + room.speaking = speaker_ids.binary_search(&id).is_ok(); } } @@ -1046,18 +1039,16 @@ impl Room { if let LocalTrack::Published { track_publication, .. } = &room.microphone_track + && track_publication.sid() == publication.sid() { - if track_publication.sid() == publication.sid() { - room.microphone_track = LocalTrack::None; - } + room.microphone_track = LocalTrack::None; } if let LocalTrack::Published { track_publication, .. 
} = &room.screen_track + && track_publication.sid() == publication.sid() { - if track_publication.sid() == publication.sid() { - room.screen_track = LocalTrack::None; - } + room.screen_track = LocalTrack::None; } } } @@ -1182,7 +1173,7 @@ impl Room { this.update(cx, |this, cx| { this.shared_projects.insert(project.downgrade()); let active_project = this.local_participant.active_project.as_ref(); - if active_project.map_or(false, |location| *location == project) { + if active_project.is_some_and(|location| *location == project) { this.set_location(Some(&project), cx) } else { Task::ready(Ok(())) @@ -1255,9 +1246,9 @@ impl Room { } pub fn is_sharing_screen(&self) -> bool { - self.live_kit.as_ref().map_or(false, |live_kit| { - !matches!(live_kit.screen_track, LocalTrack::None) - }) + self.live_kit + .as_ref() + .is_some_and(|live_kit| !matches!(live_kit.screen_track, LocalTrack::None)) } pub fn shared_screen_id(&self) -> Option { @@ -1270,13 +1261,13 @@ impl Room { } pub fn is_sharing_mic(&self) -> bool { - self.live_kit.as_ref().map_or(false, |live_kit| { - !matches!(live_kit.microphone_track, LocalTrack::None) - }) + self.live_kit + .as_ref() + .is_some_and(|live_kit| !matches!(live_kit.microphone_track, LocalTrack::None)) } pub fn is_muted(&self) -> bool { - self.live_kit.as_ref().map_or(false, |live_kit| { + self.live_kit.as_ref().is_some_and(|live_kit| { matches!(live_kit.microphone_track, LocalTrack::None) || live_kit.muted_by_user || live_kit.deafened @@ -1286,13 +1277,13 @@ impl Room { pub fn muted_by_user(&self) -> bool { self.live_kit .as_ref() - .map_or(false, |live_kit| live_kit.muted_by_user) + .is_some_and(|live_kit| live_kit.muted_by_user) } pub fn is_speaking(&self) -> bool { self.live_kit .as_ref() - .map_or(false, |live_kit| live_kit.speaking) + .is_some_and(|live_kit| live_kit.speaking) } pub fn is_deafened(&self) -> Option { @@ -1488,10 +1479,8 @@ impl Room { self.set_deafened(deafened, cx); - if should_change_mute { - if let Some(task) = self.set_mute(deafened, cx) { - task.detach_and_log_err(cx); - } + if should_change_mute && let Some(task) = self.set_mute(deafened, cx) { + task.detach_and_log_err(cx); } } } diff --git a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs index 183f7eb3c6a47dad4cb35b95dd2d3e096e0a612f..828248b330b6ef6cfe0e13eab426de2900d364b2 100644 --- a/crates/channel/src/channel_buffer.rs +++ b/crates/channel/src/channel_buffer.rs @@ -82,7 +82,7 @@ impl ChannelBuffer { collaborators: Default::default(), acknowledge_task: None, channel_id: channel.id, - subscription: Some(subscription.set_entity(&cx.entity(), &mut cx.to_async())), + subscription: Some(subscription.set_entity(&cx.entity(), &cx.to_async())), user_store, channel_store, }; @@ -110,7 +110,7 @@ impl ChannelBuffer { let Ok(subscription) = self.client.subscribe_to_entity(self.channel_id.0) else { return; }; - self.subscription = Some(subscription.set_entity(&cx.entity(), &mut cx.to_async())); + self.subscription = Some(subscription.set_entity(&cx.entity(), &cx.to_async())); cx.emit(ChannelBufferEvent::Connected); } } @@ -135,7 +135,7 @@ impl ChannelBuffer { } } - for (_, old_collaborator) in &self.collaborators { + for old_collaborator in self.collaborators.values() { if !new_collaborators.contains_key(&old_collaborator.peer_id) { self.buffer.update(cx, |buffer, cx| { buffer.remove_peer(old_collaborator.replica_id, cx) @@ -191,12 +191,11 @@ impl ChannelBuffer { operation, is_local: true, } => { - if *ZED_ALWAYS_ACTIVE { - if let language::Operation::UpdateSelections { 
selections, .. } = operation { - if selections.is_empty() { - return; - } - } + if *ZED_ALWAYS_ACTIVE + && let language::Operation::UpdateSelections { selections, .. } = operation + && selections.is_empty() + { + return; } let operation = language::proto::serialize_operation(operation); self.client diff --git a/crates/channel/src/channel_chat.rs b/crates/channel/src/channel_chat.rs index 4ac37ffd14ca2602756afecc788aae9f6065cad9..baf23ac39f983c018da2f291bec7879913f12a58 100644 --- a/crates/channel/src/channel_chat.rs +++ b/crates/channel/src/channel_chat.rs @@ -329,24 +329,24 @@ impl ChannelChat { loop { let step = chat .update(&mut cx, |chat, cx| { - if let Some(first_id) = chat.first_loaded_message_id() { - if first_id <= message_id { - let mut cursor = chat - .messages - .cursor::>(&()); - let message_id = ChannelMessageId::Saved(message_id); - cursor.seek(&message_id, Bias::Left); - return ControlFlow::Break( - if cursor - .item() - .map_or(false, |message| message.id == message_id) - { - Some(cursor.start().1.0) - } else { - None - }, - ); - } + if let Some(first_id) = chat.first_loaded_message_id() + && first_id <= message_id + { + let mut cursor = chat + .messages + .cursor::>(&()); + let message_id = ChannelMessageId::Saved(message_id); + cursor.seek(&message_id, Bias::Left); + return ControlFlow::Break( + if cursor + .item() + .is_some_and(|message| message.id == message_id) + { + Some(cursor.start().1.0) + } else { + None + }, + ); } ControlFlow::Continue(chat.load_more_messages(cx)) }) @@ -359,22 +359,21 @@ impl ChannelChat { } pub fn acknowledge_last_message(&mut self, cx: &mut Context) { - if let ChannelMessageId::Saved(latest_message_id) = self.messages.summary().max_id { - if self + if let ChannelMessageId::Saved(latest_message_id) = self.messages.summary().max_id + && self .last_acknowledged_id - .map_or(true, |acknowledged_id| acknowledged_id < latest_message_id) - { - self.rpc - .send(proto::AckChannelMessage { - channel_id: self.channel_id.0, - message_id: latest_message_id, - }) - .ok(); - self.last_acknowledged_id = Some(latest_message_id); - self.channel_store.update(cx, |store, cx| { - store.acknowledge_message_id(self.channel_id, latest_message_id, cx); - }); - } + .is_none_or(|acknowledged_id| acknowledged_id < latest_message_id) + { + self.rpc + .send(proto::AckChannelMessage { + channel_id: self.channel_id.0, + message_id: latest_message_id, + }) + .ok(); + self.last_acknowledged_id = Some(latest_message_id); + self.channel_store.update(cx, |store, cx| { + store.acknowledge_message_id(self.channel_id, latest_message_id, cx); + }); } } @@ -407,10 +406,10 @@ impl ChannelChat { let missing_ancestors = loaded_messages .iter() .filter_map(|message| { - if let Some(ancestor_id) = message.reply_to_message_id { - if !loaded_message_ids.contains(&ancestor_id) { - return Some(ancestor_id); - } + if let Some(ancestor_id) = message.reply_to_message_id + && !loaded_message_ids.contains(&ancestor_id) + { + return Some(ancestor_id); } None }) @@ -533,7 +532,7 @@ impl ChannelChat { message: TypedEnvelope, mut cx: AsyncApp, ) -> Result<()> { - let user_store = this.read_with(&mut cx, |this, _| this.user_store.clone())?; + let user_store = this.read_with(&cx, |this, _| this.user_store.clone())?; let message = message.payload.message.context("empty message")?; let message_id = message.id; @@ -565,7 +564,7 @@ impl ChannelChat { message: TypedEnvelope, mut cx: AsyncApp, ) -> Result<()> { - let user_store = this.read_with(&mut cx, |this, _| this.user_store.clone())?; + let user_store = 
this.read_with(&cx, |this, _| this.user_store.clone())?; let message = message.payload.message.context("empty message")?; let message = ChannelMessage::from_proto(message, &user_store, &mut cx).await?; @@ -613,7 +612,7 @@ impl ChannelChat { while let Some(message) = old_cursor.item() { let message_ix = old_cursor.start().1.0; if nonces.contains(&message.nonce) { - if ranges.last().map_or(false, |r| r.end == message_ix) { + if ranges.last().is_some_and(|r| r.end == message_ix) { ranges.last_mut().unwrap().end += 1; } else { ranges.push(message_ix..message_ix + 1); @@ -646,32 +645,32 @@ impl ChannelChat { fn message_removed(&mut self, id: u64, cx: &mut Context) { let mut cursor = self.messages.cursor::(&()); let mut messages = cursor.slice(&ChannelMessageId::Saved(id), Bias::Left); - if let Some(item) = cursor.item() { - if item.id == ChannelMessageId::Saved(id) { - let deleted_message_ix = messages.summary().count; - cursor.next(); - messages.append(cursor.suffix(), &()); - drop(cursor); - self.messages = messages; - - // If the message that was deleted was the last acknowledged message, - // replace the acknowledged message with an earlier one. - self.channel_store.update(cx, |store, _| { - let summary = self.messages.summary(); - if summary.count == 0 { - store.set_acknowledged_message_id(self.channel_id, None); - } else if deleted_message_ix == summary.count { - if let ChannelMessageId::Saved(id) = summary.max_id { - store.set_acknowledged_message_id(self.channel_id, Some(id)); - } - } - }); + if let Some(item) = cursor.item() + && item.id == ChannelMessageId::Saved(id) + { + let deleted_message_ix = messages.summary().count; + cursor.next(); + messages.append(cursor.suffix(), &()); + drop(cursor); + self.messages = messages; + + // If the message that was deleted was the last acknowledged message, + // replace the acknowledged message with an earlier one. 
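The channel_chat.rs hunk above folds consecutive message indices into half-open ranges via `ranges.last().is_some_and(|r| r.end == message_ix)`. The same idiom on plain integers, as a self-contained sketch:

```rust
use std::ops::Range;

// Merge consecutive indices into half-open ranges, e.g. [1, 2, 3, 7] -> [1..4, 7..8].
fn coalesce(indices: impl IntoIterator<Item = usize>) -> Vec<Range<usize>> {
    let mut ranges: Vec<Range<usize>> = Vec::new();
    for ix in indices {
        if ranges.last().is_some_and(|r| r.end == ix) {
            // Extends the previous run by one.
            ranges.last_mut().unwrap().end += 1;
        } else {
            // Starts a new run.
            ranges.push(ix..ix + 1);
        }
    }
    ranges
}

fn main() {
    assert_eq!(coalesce([1, 2, 3, 7, 8, 12]), vec![1..4, 7..9, 12..13]);
}
```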
+ self.channel_store.update(cx, |store, _| { + let summary = self.messages.summary(); + if summary.count == 0 { + store.set_acknowledged_message_id(self.channel_id, None); + } else if deleted_message_ix == summary.count + && let ChannelMessageId::Saved(id) = summary.max_id + { + store.set_acknowledged_message_id(self.channel_id, Some(id)); + } + }); - cx.emit(ChannelChatEvent::MessagesUpdated { - old_range: deleted_message_ix..deleted_message_ix + 1, - new_count: 0, - }); - } + cx.emit(ChannelChatEvent::MessagesUpdated { + old_range: deleted_message_ix..deleted_message_ix + 1, + new_count: 0, + }); } } diff --git a/crates/channel/src/channel_store.rs b/crates/channel/src/channel_store.rs index 4ad156b9fb08e8af95e5ea49132c4c4786e348a1..daa8a91c7c8952804c854b170d0bc2e1aa817631 100644 --- a/crates/channel/src/channel_store.rs +++ b/crates/channel/src/channel_store.rs @@ -262,13 +262,12 @@ impl ChannelStore { } } status = status_receiver.next().fuse() => { - if let Some(status) = status { - if status.is_connected() { + if let Some(status) = status + && status.is_connected() { this.update(cx, |this, _cx| { this.initialize(); }).ok(); } - } continue; } _ = timer => { @@ -336,10 +335,10 @@ impl ChannelStore { } pub fn has_open_channel_buffer(&self, channel_id: ChannelId, _cx: &App) -> bool { - if let Some(buffer) = self.opened_buffers.get(&channel_id) { - if let OpenEntityHandle::Open(buffer) = buffer { - return buffer.upgrade().is_some(); - } + if let Some(buffer) = self.opened_buffers.get(&channel_id) + && let OpenEntityHandle::Open(buffer) = buffer + { + return buffer.upgrade().is_some(); } false } @@ -408,13 +407,12 @@ impl ChannelStore { pub fn last_acknowledge_message_id(&self, channel_id: ChannelId) -> Option { self.channel_states.get(&channel_id).and_then(|state| { - if let Some(last_message_id) = state.latest_chat_message { - if state + if let Some(last_message_id) = state.latest_chat_message + && state .last_acknowledged_message_id() .is_some_and(|id| id < last_message_id) - { - return state.last_acknowledged_message_id(); - } + { + return state.last_acknowledged_message_id(); } None @@ -570,16 +568,14 @@ impl ChannelStore { self.channel_index .by_id() .get(&channel_id) - .map_or(false, |channel| channel.is_root_channel()) + .is_some_and(|channel| channel.is_root_channel()) } pub fn is_public_channel(&self, channel_id: ChannelId) -> bool { self.channel_index .by_id() .get(&channel_id) - .map_or(false, |channel| { - channel.visibility == ChannelVisibility::Public - }) + .is_some_and(|channel| channel.visibility == ChannelVisibility::Public) } pub fn channel_capability(&self, channel_id: ChannelId) -> Capability { @@ -910,9 +906,9 @@ impl ChannelStore { async fn handle_update_channels( this: Entity, message: TypedEnvelope, - mut cx: AsyncApp, + cx: AsyncApp, ) -> Result<()> { - this.read_with(&mut cx, |this, _| { + this.read_with(&cx, |this, _| { this.update_channels_tx .unbounded_send(message.payload) .unwrap(); @@ -962,27 +958,27 @@ impl ChannelStore { self.disconnect_channel_buffers_task.take(); for chat in self.opened_chats.values() { - if let OpenEntityHandle::Open(chat) = chat { - if let Some(chat) = chat.upgrade() { - chat.update(cx, |chat, cx| { - chat.rejoin(cx); - }); - } + if let OpenEntityHandle::Open(chat) = chat + && let Some(chat) = chat.upgrade() + { + chat.update(cx, |chat, cx| { + chat.rejoin(cx); + }); } } let mut buffer_versions = Vec::new(); for buffer in self.opened_buffers.values() { - if let OpenEntityHandle::Open(buffer) = buffer { - if let Some(buffer) = 
buffer.upgrade() { - let channel_buffer = buffer.read(cx); - let buffer = channel_buffer.buffer().read(cx); - buffer_versions.push(proto::ChannelBufferVersion { - channel_id: channel_buffer.channel_id.0, - epoch: channel_buffer.epoch(), - version: language::proto::serialize_version(&buffer.version()), - }); - } + if let OpenEntityHandle::Open(buffer) = buffer + && let Some(buffer) = buffer.upgrade() + { + let channel_buffer = buffer.read(cx); + let buffer = channel_buffer.buffer().read(cx); + buffer_versions.push(proto::ChannelBufferVersion { + channel_id: channel_buffer.channel_id.0, + epoch: channel_buffer.epoch(), + version: language::proto::serialize_version(&buffer.version()), + }); } } @@ -1077,11 +1073,11 @@ impl ChannelStore { if let Some(this) = this.upgrade() { this.update(cx, |this, cx| { - for (_, buffer) in &this.opened_buffers { - if let OpenEntityHandle::Open(buffer) = &buffer { - if let Some(buffer) = buffer.upgrade() { - buffer.update(cx, |buffer, cx| buffer.disconnect(cx)); - } + for buffer in this.opened_buffers.values() { + if let OpenEntityHandle::Open(buffer) = &buffer + && let Some(buffer) = buffer.upgrade() + { + buffer.update(cx, |buffer, cx| buffer.disconnect(cx)); } } }) @@ -1157,10 +1153,9 @@ impl ChannelStore { } if let Some(OpenEntityHandle::Open(buffer)) = self.opened_buffers.remove(&channel_id) + && let Some(buffer) = buffer.upgrade() { - if let Some(buffer) = buffer.upgrade() { - buffer.update(cx, ChannelBuffer::disconnect); - } + buffer.update(cx, ChannelBuffer::disconnect); } } } @@ -1170,12 +1165,11 @@ impl ChannelStore { let id = ChannelId(channel.id); let channel_changed = index.insert(channel); - if channel_changed { - if let Some(OpenEntityHandle::Open(buffer)) = self.opened_buffers.get(&id) { - if let Some(buffer) = buffer.upgrade() { - buffer.update(cx, ChannelBuffer::channel_changed); - } - } + if channel_changed + && let Some(OpenEntityHandle::Open(buffer)) = self.opened_buffers.get(&id) + && let Some(buffer) = buffer.upgrade() + { + buffer.update(cx, ChannelBuffer::channel_changed); } } diff --git a/crates/channel/src/channel_store_tests.rs b/crates/channel/src/channel_store_tests.rs index c92226eeebd131170b0a5b04e4ed7f42c19a64fc..2a914330847053bc044da07e11642906b65a3159 100644 --- a/crates/channel/src/channel_store_tests.rs +++ b/crates/channel/src/channel_store_tests.rs @@ -438,7 +438,7 @@ fn init_test(cx: &mut App) -> Entity { let clock = Arc::new(FakeSystemClock::new()); let http = FakeHttpClient::with_404_response(); - let client = Client::new(clock, http.clone(), cx); + let client = Client::new(clock, http, cx); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); client::init(&client, cx); diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index 8d6cd2544ad4c7cc152bad50b16bad55c0e1ed55..b84e7a9f7a53a471bd854a15377c79f45003aaf4 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -363,7 +363,7 @@ fn anonymous_fd(path: &str) -> Option { let fd: fd::RawFd = fd_str.parse().ok()?; let file = unsafe { fs::File::from_raw_fd(fd) }; - return Some(file); + Some(file) } #[cfg(any(target_os = "macos", target_os = "freebsd"))] { @@ -381,13 +381,13 @@ fn anonymous_fd(path: &str) -> Option { } let fd: fd::RawFd = fd_str.parse().ok()?; let file = unsafe { fs::File::from_raw_fd(fd) }; - return Some(file); + Some(file) } #[cfg(not(any(target_os = "linux", target_os = "macos", target_os = "freebsd")))] { _ = path; // not implemented for bsd, windows. 
Could be, but isn't yet - return None; + None } } @@ -494,11 +494,11 @@ mod linux { Ok(Fork::Parent(_)) => Ok(()), Ok(Fork::Child) => { unsafe { std::env::set_var(FORCE_CLI_MODE_ENV_VAR_NAME, "") }; - if let Err(_) = fork::setsid() { + if fork::setsid().is_err() { eprintln!("failed to setsid: {}", std::io::Error::last_os_error()); process::exit(1); } - if let Err(_) = fork::close_fd() { + if fork::close_fd().is_err() { eprintln!("failed to close_fd: {}", std::io::Error::last_os_error()); } let error = @@ -518,11 +518,11 @@ mod linux { ) -> Result<(), std::io::Error> { for _ in 0..100 { thread::sleep(Duration::from_millis(10)); - if sock.connect_addr(&sock_addr).is_ok() { + if sock.connect_addr(sock_addr).is_ok() { return Ok(()); } } - sock.connect_addr(&sock_addr) + sock.connect_addr(sock_addr) } } } @@ -534,8 +534,8 @@ mod flatpak { use std::process::Command; use std::{env, process}; - const EXTRA_LIB_ENV_NAME: &'static str = "ZED_FLATPAK_LIB_PATH"; - const NO_ESCAPE_ENV_NAME: &'static str = "ZED_FLATPAK_NO_ESCAPE"; + const EXTRA_LIB_ENV_NAME: &str = "ZED_FLATPAK_LIB_PATH"; + const NO_ESCAPE_ENV_NAME: &str = "ZED_FLATPAK_NO_ESCAPE"; /// Adds bundled libraries to LD_LIBRARY_PATH if running under flatpak pub fn ld_extra_libs() { @@ -586,14 +586,11 @@ mod flatpak { pub fn set_bin_if_no_escape(mut args: super::Args) -> super::Args { if env::var(NO_ESCAPE_ENV_NAME).is_ok() - && env::var("FLATPAK_ID").map_or(false, |id| id.starts_with("dev.zed.Zed")) + && env::var("FLATPAK_ID").is_ok_and(|id| id.starts_with("dev.zed.Zed")) + && args.zed.is_none() { - if args.zed.is_none() { - args.zed = Some("/app/libexec/zed-editor".into()); - unsafe { - env::set_var("ZED_UPDATE_EXPLANATION", "Please use flatpak to update zed") - }; - } + args.zed = Some("/app/libexec/zed-editor".into()); + unsafe { env::set_var("ZED_UPDATE_EXPLANATION", "Please use flatpak to update zed") }; } args } @@ -929,7 +926,7 @@ mod mac_os { fn path(&self) -> PathBuf { match self { - Bundle::App { app_bundle, .. } => app_bundle.join("Contents/MacOS/zed").clone(), + Bundle::App { app_bundle, .. } => app_bundle.join("Contents/MacOS/zed"), Bundle::LocalPath { executable, .. 
} => executable.clone(), } } @@ -957,17 +954,14 @@ mod mac_os { ) -> Result<()> { use anyhow::bail; - let app_id_prompt = format!("id of app \"{}\"", channel.display_name()); - let app_id_output = Command::new("osascript") + let app_path_prompt = format!( + "POSIX path of (path to application \"{}\")", + channel.display_name() + ); + let app_path_output = Command::new("osascript") .arg("-e") - .arg(&app_id_prompt) + .arg(&app_path_prompt) .output()?; - if !app_id_output.status.success() { - bail!("Could not determine app id for {}", channel.display_name()); - } - let app_name = String::from_utf8(app_id_output.stdout)?.trim().to_owned(); - let app_path_prompt = format!("kMDItemCFBundleIdentifier == '{app_name}'"); - let app_path_output = Command::new("mdfind").arg(app_path_prompt).output()?; if !app_path_output.status.success() { bail!( "Could not determine app path for {}", diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml index 365625b44535e474baecf058c98f54aaf05b5e49..5c6d1157fd710de0e1dd160b611c0bd7c6667c4d 100644 --- a/crates/client/Cargo.toml +++ b/crates/client/Cargo.toml @@ -44,6 +44,7 @@ rpc = { workspace = true, features = ["gpui"] } schemars.workspace = true serde.workspace = true serde_json.workspace = true +serde_urlencoded.workspace = true settings.workspace = true sha2.workspace = true smol.workspace = true diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index f09c012a858e3cf97166dae9dbdbeb3da51b96b6..ed3f1149433a9532ef2c08032ffa494cf09dfb4c 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -76,7 +76,7 @@ pub static ZED_APP_PATH: LazyLock> = LazyLock::new(|| std::env::var("ZED_APP_PATH").ok().map(PathBuf::from)); pub static ZED_ALWAYS_ACTIVE: LazyLock = - LazyLock::new(|| std::env::var("ZED_ALWAYS_ACTIVE").map_or(false, |e| !e.is_empty())); + LazyLock::new(|| std::env::var("ZED_ALWAYS_ACTIVE").is_ok_and(|e| !e.is_empty())); pub const INITIAL_RECONNECTION_DELAY: Duration = Duration::from_millis(500); pub const MAX_RECONNECTION_DELAY: Duration = Duration::from_secs(30); @@ -162,7 +162,7 @@ pub fn init(client: &Arc, cx: &mut App) { let client = client.clone(); move |_: &SignIn, cx| { if let Some(client) = client.upgrade() { - cx.spawn(async move |cx| client.sign_in_with_optional_connect(true, &cx).await) + cx.spawn(async move |cx| client.sign_in_with_optional_connect(true, cx).await) .detach_and_log_err(cx); } } @@ -173,7 +173,7 @@ pub fn init(client: &Arc, cx: &mut App) { move |_: &SignOut, cx| { if let Some(client) = client.upgrade() { cx.spawn(async move |cx| { - client.sign_out(&cx).await; + client.sign_out(cx).await; }) .detach(); } @@ -181,11 +181,11 @@ pub fn init(client: &Arc, cx: &mut App) { }); cx.on_action({ - let client = client.clone(); + let client = client; move |_: &Reconnect, cx| { if let Some(client) = client.upgrade() { cx.spawn(async move |cx| { - client.reconnect(&cx); + client.reconnect(cx); }) .detach(); } @@ -677,7 +677,7 @@ impl Client { let mut delay = INITIAL_RECONNECTION_DELAY; loop { - match client.connect(true, &cx).await { + match client.connect(true, cx).await { ConnectionResult::Timeout => { log::error!("client connect attempt timed out") } @@ -701,7 +701,7 @@ impl Client { Status::ReconnectionError { next_reconnection: Instant::now() + delay, }, - &cx, + cx, ); let jitter = Duration::from_millis(rng.gen_range(0..delay.as_millis() as u64)); @@ -791,7 +791,7 @@ impl Client { Arc::new(move |subscriber, envelope, client, cx| { let subscriber = subscriber.downcast::().unwrap(); let 
envelope = envelope.into_any().downcast::>().unwrap(); - handler(subscriber, *envelope, client.clone(), cx).boxed_local() + handler(subscriber, *envelope, client, cx).boxed_local() }), ); if prev_handler.is_some() { @@ -864,22 +864,23 @@ impl Client { let mut credentials = None; let old_credentials = self.state.read().credentials.clone(); - if let Some(old_credentials) = old_credentials { - if self.validate_credentials(&old_credentials, cx).await? { - credentials = Some(old_credentials); - } + if let Some(old_credentials) = old_credentials + && self.validate_credentials(&old_credentials, cx).await? + { + credentials = Some(old_credentials); } - if credentials.is_none() && try_provider { - if let Some(stored_credentials) = self.credentials_provider.read_credentials(cx).await { - if self.validate_credentials(&stored_credentials, cx).await? { - credentials = Some(stored_credentials); - } else { - self.credentials_provider - .delete_credentials(cx) - .await - .log_err(); - } + if credentials.is_none() + && try_provider + && let Some(stored_credentials) = self.credentials_provider.read_credentials(cx).await + { + if self.validate_credentials(&stored_credentials, cx).await? { + credentials = Some(stored_credentials); + } else { + self.credentials_provider + .delete_credentials(cx) + .await + .log_err(); } } @@ -973,6 +974,11 @@ impl Client { try_provider: bool, cx: &AsyncApp, ) -> Result<()> { + // Don't try to sign in again if we're already connected to Collab, as it will temporarily disconnect us. + if self.status().borrow().is_connected() { + return Ok(()); + } + let (is_staff_tx, is_staff_rx) = oneshot::channel::(); let mut is_staff_tx = Some(is_staff_tx); cx.update(|cx| { @@ -1023,11 +1029,11 @@ impl Client { Status::SignedOut | Status::Authenticated => true, Status::ConnectionError | Status::ConnectionLost - | Status::Authenticating { .. } + | Status::Authenticating | Status::AuthenticationError - | Status::Reauthenticating { .. } + | Status::Reauthenticating | Status::ReconnectionError { .. } => false, - Status::Connected { .. } | Status::Connecting { .. } | Status::Reconnecting { .. } => { + Status::Connected { .. } | Status::Connecting | Status::Reconnecting => { return ConnectionResult::Result(Ok(())); } Status::UpgradeRequired => { @@ -1151,7 +1157,7 @@ impl Client { let this = self.clone(); async move |cx| { while let Some(message) = incoming.next().await { - this.handle_message(message, &cx); + this.handle_message(message, cx); // Don't starve the main thread when receiving lots of messages at once. smol::future::yield_now().await; } @@ -1169,12 +1175,12 @@ impl Client { peer_id, }) { - this.set_status(Status::SignedOut, &cx); + this.set_status(Status::SignedOut, cx); } } Err(err) => { log::error!("connection error: {:?}", err); - this.set_status(Status::ConnectionLost, &cx); + this.set_status(Status::ConnectionLost, cx); } } }) @@ -1410,6 +1416,12 @@ impl Client { open_url_tx.send(url).log_err(); + #[derive(Deserialize)] + struct CallbackParams { + pub user_id: String, + pub access_token: String, + } + // Receive the HTTP request from the user's browser. Retrieve the user id and encrypted // access token from the query params. // @@ -1420,17 +1432,13 @@ impl Client { for _ in 0..100 { if let Some(req) = server.recv_timeout(Duration::from_secs(1))? 
{ let path = req.url(); - let mut user_id = None; - let mut access_token = None; let url = Url::parse(&format!("http://example.com{}", path)) .context("failed to parse login notification url")?; - for (key, value) in url.query_pairs() { - if key == "access_token" { - access_token = Some(value.to_string()); - } else if key == "user_id" { - user_id = Some(value.to_string()); - } - } + let callback_params: CallbackParams = + serde_urlencoded::from_str(url.query().unwrap_or_default()) + .context( + "failed to parse sign-in callback query parameters", + )?; let post_auth_url = http.build_url("/native_app_signin_succeeded"); @@ -1445,8 +1453,8 @@ impl Client { ) .context("failed to respond to login http request")?; return Ok(( - user_id.context("missing user_id parameter")?, - access_token.context("missing access_token parameter")?, + callback_params.user_id, + callback_params.access_token, )); } } @@ -1894,10 +1902,7 @@ mod tests { assert!(matches!(status.next().await, Some(Status::Connecting))); executor.advance_clock(CONNECTION_TIMEOUT); - assert!(matches!( - status.next().await, - Some(Status::ConnectionError { .. }) - )); + assert!(matches!(status.next().await, Some(Status::ConnectionError))); auth_and_connect.await.into_response().unwrap_err(); // Allow the connection to be established. @@ -1921,10 +1926,7 @@ mod tests { }) }); executor.advance_clock(2 * INITIAL_RECONNECTION_DELAY); - assert!(matches!( - status.next().await, - Some(Status::Reconnecting { .. }) - )); + assert!(matches!(status.next().await, Some(Status::Reconnecting))); executor.advance_clock(CONNECTION_TIMEOUT); assert!(matches!( @@ -2040,10 +2042,7 @@ mod tests { assert_eq!(*auth_count.lock(), 1); assert_eq!(*dropped_auth_count.lock(), 0); - let _authenticate = cx.spawn({ - let client = client.clone(); - |cx| async move { client.connect(false, &cx).await } - }); + let _authenticate = cx.spawn(|cx| async move { client.connect(false, &cx).await }); executor.run_until_parked(); assert_eq!(*auth_count.lock(), 2); assert_eq!(*dropped_auth_count.lock(), 1); @@ -2065,8 +2064,8 @@ mod tests { let (done_tx1, done_rx1) = smol::channel::unbounded(); let (done_tx2, done_rx2) = smol::channel::unbounded(); AnyProtoClient::from(client.clone()).add_entity_message_handler( - move |entity: Entity, _: TypedEnvelope, mut cx| { - match entity.read_with(&mut cx, |entity, _| entity.id).unwrap() { + move |entity: Entity, _: TypedEnvelope, cx| { + match entity.read_with(&cx, |entity, _| entity.id).unwrap() { 1 => done_tx1.try_send(()).unwrap(), 2 => done_tx2.try_send(()).unwrap(), _ => unreachable!(), @@ -2090,17 +2089,17 @@ mod tests { let _subscription1 = client .subscribe_to_entity(1) .unwrap() - .set_entity(&entity1, &mut cx.to_async()); + .set_entity(&entity1, &cx.to_async()); let _subscription2 = client .subscribe_to_entity(2) .unwrap() - .set_entity(&entity2, &mut cx.to_async()); + .set_entity(&entity2, &cx.to_async()); // Ensure dropping a subscription for the same entity type still allows receiving of // messages for other entity IDs of the same type. 
let subscription3 = client
             .subscribe_to_entity(3)
             .unwrap()
-            .set_entity(&entity3, &mut cx.to_async());
+            .set_entity(&entity3, &cx.to_async());
         drop(subscription3);
 
         server.send(proto::JoinProject {
diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs
index 43a1a0b7a4f85fbee43c05292e354bc257e5f941..f3142a0af667d2022dfddd1c0f3f7b309e803d46 100644
--- a/crates/client/src/telemetry.rs
+++ b/crates/client/src/telemetry.rs
@@ -340,22 +340,35 @@ impl Telemetry {
     }
 
     pub fn log_edit_event(self: &Arc<Self>, environment: &'static str, is_via_ssh: bool) {
+        static LAST_EVENT_TIME: Mutex<Option<std::time::Instant>> = Mutex::new(None);
+
         let mut state = self.state.lock();
         let period_data = state.event_coalescer.log_event(environment);
         drop(state);
-        if let Some((start, end, environment)) = period_data {
-            let duration = end
-                .saturating_duration_since(start)
-                .min(Duration::from_secs(60 * 60 * 24))
-                .as_millis() as i64;
+        if let Some(mut last_event) = LAST_EVENT_TIME.try_lock() {
+            let current_time = std::time::Instant::now();
+            let last_time = last_event.get_or_insert(current_time);
 
-            telemetry::event!(
-                "Editor Edited",
-                duration = duration,
-                environment = environment,
-                is_via_ssh = is_via_ssh
-            );
+            if current_time.duration_since(*last_time) > Duration::from_secs(60 * 10) {
+                *last_time = current_time;
+            } else {
+                return;
+            }
+
+            if let Some((start, end, environment)) = period_data {
+                let duration = end
+                    .saturating_duration_since(start)
+                    .min(Duration::from_secs(60 * 60 * 24))
+                    .as_millis() as i64;
+
+                telemetry::event!(
+                    "Editor Edited",
+                    duration = duration,
+                    environment = environment,
+                    is_via_ssh = is_via_ssh
+                );
+            }
         }
     }
@@ -726,7 +739,7 @@ mod tests {
         );
 
         // Third scan of worktree does not double report, as we already reported
-        test_project_discovery_helper(telemetry.clone(), vec!["package.json"], None, worktree_id);
+        test_project_discovery_helper(telemetry, vec!["package.json"], None, worktree_id);
     }
 
     #[gpui::test]
@@ -738,7 +751,7 @@
         let clock = Arc::new(FakeSystemClock::new());
         let http = FakeHttpClient::with_404_response();
         let telemetry = cx.update(|cx| Telemetry::new(clock.clone(), http, cx));
 
         test_project_discovery_helper(
-            telemetry.clone(),
+            telemetry,
             vec!["package.json", "pnpm-lock.yaml"],
             Some(vec!["node", "pnpm"]),
             1,
@@ -754,7 +767,7 @@
         let telemetry = cx.update(|cx| Telemetry::new(clock.clone(), http, cx));
 
         test_project_discovery_helper(
-            telemetry.clone(),
+            telemetry,
             vec!["package.json", "yarn.lock"],
             Some(vec!["node", "yarn"]),
             1,
@@ -773,7 +786,7 @@
         // project type for the same worktree multiple times
         test_project_discovery_helper(
-            telemetry.clone().clone(),
+            telemetry.clone(),
             vec!["global.json"],
             Some(vec!["dotnet"]),
             1,
diff --git a/crates/client/src/test.rs b/crates/client/src/test.rs
index 439fb100d2244499fa59a81495e282673305e00b..3c451fcb015b5efb4fd860cce09cdb29a8f07379 100644
--- a/crates/client/src/test.rs
+++ b/crates/client/src/test.rs
@@ -1,16 +1,12 @@
 use crate::{Client, Connection, Credentials, EstablishConnectionError, UserStore};
 use anyhow::{Context as _, Result, anyhow};
-use chrono::Duration;
 use cloud_api_client::{AuthenticatedUser, GetAuthenticatedUserResponse, PlanInfo};
 use cloud_llm_client::{CurrentUsage, Plan, UsageData, UsageLimit};
 use futures::{StreamExt, stream::BoxStream};
 use gpui::{AppContext as _, BackgroundExecutor, Entity, TestAppContext};
 use http_client::{AsyncBody, Method, Request, http};
 use parking_lot::Mutex;
-use rpc::{
-    ConnectionId, Peer, Receipt, TypedEnvelope,
-    proto::{self, GetPrivateUserInfo, GetPrivateUserInfoResponse},
-};
+use rpc::{ConnectionId, Peer, Receipt,
TypedEnvelope, proto}; use std::sync::Arc; pub struct FakeServer { @@ -187,50 +183,27 @@ impl FakeServer { pub async fn receive(&self) -> Result> { self.executor.start_waiting(); - loop { - let message = self - .state - .lock() - .incoming - .as_mut() - .expect("not connected") - .next() - .await - .context("other half hung up")?; - self.executor.finish_waiting(); - let type_name = message.payload_type_name(); - let message = message.into_any(); - - if message.is::>() { - return Ok(*message.downcast().unwrap()); - } - - let accepted_tos_at = chrono::Utc::now() - .checked_sub_signed(Duration::hours(5)) - .expect("failed to build accepted_tos_at") - .timestamp() as u64; - - if message.is::>() { - self.respond( - message - .downcast::>() - .unwrap() - .receipt(), - GetPrivateUserInfoResponse { - metrics_id: "the-metrics-id".into(), - staff: false, - flags: Default::default(), - accepted_tos_at: Some(accepted_tos_at), - }, - ); - continue; - } + let message = self + .state + .lock() + .incoming + .as_mut() + .expect("not connected") + .next() + .await + .context("other half hung up")?; + self.executor.finish_waiting(); + let type_name = message.payload_type_name(); + let message = message.into_any(); - panic!( - "fake server received unexpected message type: {:?}", - type_name - ); + if message.is::>() { + return Ok(*message.downcast().unwrap()); } + + panic!( + "fake server received unexpected message type: {:?}", + type_name + ); } pub fn respond(&self, receipt: Receipt, response: T::Response) { diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs index 9f76dd7ad08034e814d4be0ff825f37415a3bccd..20f99e394460f2f594c078dfde6cd568dfc7906b 100644 --- a/crates/client/src/user.rs +++ b/crates/client/src/user.rs @@ -41,7 +41,7 @@ impl std::fmt::Display for ChannelId { pub struct ProjectId(pub u64); impl ProjectId { - pub fn to_proto(&self) -> u64 { + pub fn to_proto(self) -> u64 { self.0 } } @@ -177,7 +177,6 @@ impl UserStore { let (mut current_user_tx, current_user_rx) = watch::channel(); let (update_contacts_tx, mut update_contacts_rx) = mpsc::unbounded(); let rpc_subscriptions = vec![ - client.add_message_handler(cx.weak_entity(), Self::handle_update_plan), client.add_message_handler(cx.weak_entity(), Self::handle_update_contacts), client.add_message_handler(cx.weak_entity(), Self::handle_update_invite_info), client.add_message_handler(cx.weak_entity(), Self::handle_show_contacts), @@ -226,17 +225,35 @@ impl UserStore { match status { Status::Authenticated | Status::Connected { .. 
} => { if let Some(user_id) = client.user_id() { - let response = client.cloud_client().get_authenticated_user().await; - let mut current_user = None; + let response = client + .cloud_client() + .get_authenticated_user() + .await + .log_err(); + + let current_user_and_response = if let Some(response) = response { + let user = Arc::new(User { + id: user_id, + github_login: response.user.github_login.clone().into(), + avatar_uri: response.user.avatar_url.clone().into(), + name: response.user.name.clone(), + }); + + Some((user, response)) + } else { + None + }; + current_user_tx + .send( + current_user_and_response + .as_ref() + .map(|(user, _)| user.clone()), + ) + .await + .ok(); + cx.update(|cx| { - if let Some(response) = response.log_err() { - let user = Arc::new(User { - id: user_id, - github_login: response.user.github_login.clone().into(), - avatar_uri: response.user.avatar_url.clone().into(), - name: response.user.name.clone(), - }); - current_user = Some(user.clone()); + if let Some((user, response)) = current_user_and_response { this.update(cx, |this, cx| { this.by_github_login .insert(user.github_login.clone(), user_id); @@ -247,7 +264,6 @@ impl UserStore { anyhow::Ok(()) } })??; - current_user_tx.send(current_user).await.ok(); this.update(cx, |_, cx| cx.notify())?; } @@ -316,9 +332,9 @@ impl UserStore { async fn handle_update_contacts( this: Entity, message: TypedEnvelope, - mut cx: AsyncApp, + cx: AsyncApp, ) -> Result<()> { - this.read_with(&mut cx, |this, _| { + this.read_with(&cx, |this, _| { this.update_contacts_tx .unbounded_send(UpdateContacts::Update(message.payload)) .unwrap(); @@ -326,26 +342,6 @@ impl UserStore { Ok(()) } - async fn handle_update_plan( - this: Entity, - _message: TypedEnvelope, - mut cx: AsyncApp, - ) -> Result<()> { - let client = this - .read_with(&cx, |this, _| this.client.upgrade())? - .context("client was dropped")?; - - let response = client - .cloud_client() - .get_authenticated_user() - .await - .context("failed to fetch authenticated user")?; - - this.update(&mut cx, |this, cx| { - this.update_authenticated_user(response, cx); - }) - } - fn update_contacts(&mut self, message: UpdateContacts, cx: &Context) -> Task> { match message { UpdateContacts::Wait(barrier) => { @@ -852,7 +848,7 @@ impl UserStore { pub fn has_accepted_terms_of_service(&self) -> bool { self.accepted_tos_at - .map_or(false, |accepted_tos_at| accepted_tos_at.is_some()) + .is_some_and(|accepted_tos_at| accepted_tos_at.is_some()) } pub fn accept_terms_of_service(&self, cx: &Context) -> Task> { @@ -898,10 +894,10 @@ impl UserStore { let mut ret = Vec::with_capacity(users.len()); for user in users { let user = User::new(user); - if let Some(old) = self.users.insert(user.id, user.clone()) { - if old.github_login != user.github_login { - self.by_github_login.remove(&old.github_login); - } + if let Some(old) = self.users.insert(user.id, user.clone()) + && old.github_login != user.github_login + { + self.by_github_login.remove(&old.github_login); } self.by_github_login .insert(user.github_login.clone(), user.id); @@ -1002,19 +998,6 @@ impl RequestUsage { } } - pub fn from_proto(amount: u32, limit: proto::UsageLimit) -> Option { - let limit = match limit.variant? 
{ - proto::usage_limit::Variant::Limited(limited) => { - UsageLimit::Limited(limited.limit as i32) - } - proto::usage_limit::Variant::Unlimited(_) => UsageLimit::Unlimited, - }; - Some(RequestUsage { - limit, - amount: amount as i32, - }) - } - fn from_headers( limit_name: &str, amount_name: &str, diff --git a/crates/client/src/zed_urls.rs b/crates/client/src/zed_urls.rs index 693c7bf836330fc8c6cd36ca72ee862a9e2b865b..9df41906d79b4d43234a28dde19bd6862469de8c 100644 --- a/crates/client/src/zed_urls.rs +++ b/crates/client/src/zed_urls.rs @@ -35,3 +35,11 @@ pub fn upgrade_to_zed_pro_url(cx: &App) -> String { pub fn terms_of_service(cx: &App) -> String { format!("{server_url}/terms-of-service", server_url = server_url(cx)) } + +/// Returns the URL to Zed AI's privacy and security docs. +pub fn ai_privacy_and_security(cx: &App) -> String { + format!( + "{server_url}/docs/ai/privacy-and-security", + server_url = server_url(cx) + ) +} diff --git a/crates/cloud_api_client/src/cloud_api_client.rs b/crates/cloud_api_client/src/cloud_api_client.rs index ef9a1a9a553596baf737c4e1ee60d9b3344f4ecf..92417d8319fc1ce0750f0c2b400e49a24b83e073 100644 --- a/crates/cloud_api_client/src/cloud_api_client.rs +++ b/crates/cloud_api_client/src/cloud_api_client.rs @@ -205,12 +205,12 @@ impl CloudApiClient { let mut body = String::new(); response.body_mut().read_to_string(&mut body).await?; if response.status() == StatusCode::UNAUTHORIZED { - return Ok(false); + Ok(false) } else { - return Err(anyhow!( + Err(anyhow!( "Failed to get authenticated user.\nStatus: {:?}\nBody: {body}", response.status() - )); + )) } } } diff --git a/crates/cloud_llm_client/src/cloud_llm_client.rs b/crates/cloud_llm_client/src/cloud_llm_client.rs index e78957ec4905b05bae4752613707f84316edcfba..741945af1087e7a4ff5edfc32cca4d080db3982f 100644 --- a/crates/cloud_llm_client/src/cloud_llm_client.rs +++ b/crates/cloud_llm_client/src/cloud_llm_client.rs @@ -263,12 +263,12 @@ pub struct WebSearchBody { pub query: String, } -#[derive(Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone)] pub struct WebSearchResponse { pub results: Vec, } -#[derive(Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone)] pub struct WebSearchResult { pub title: String, pub url: String, diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index 9af95317e60db78fc93b9a1fa01eaee687fac4fc..4fccd3be7ff8b4d44daf5f761695bdba81bd2ad8 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -19,7 +19,6 @@ test-support = ["sqlite"] [dependencies] anyhow.workspace = true -async-stripe.workspace = true async-trait.workspace = true async-tungstenite.workspace = true aws-config = { version = "1.1.5" } @@ -30,16 +29,13 @@ axum-extra = { version = "0.4", features = ["erased-json"] } base64.workspace = true chrono.workspace = true clock.workspace = true -cloud_llm_client.workspace = true collections.workspace = true dashmap.workspace = true -derive_more.workspace = true envy = "0.4.2" futures.workspace = true gpui.workspace = true hex.workspace = true http_client.workspace = true -jsonwebtoken.workspace = true livekit_api.workspace = true log.workspace = true nanoid.workspace = true @@ -65,7 +61,6 @@ subtle.workspace = true supermaven_api.workspace = true telemetry_events.workspace = true text.workspace = true -thiserror.workspace = true time.workspace = true tokio = { workspace = true, features = ["full"] } toml.workspace = true @@ -136,6 +131,3 @@ util.workspace = true workspace = { workspace = true, 
features = ["test-support"] } worktree = { workspace = true, features = ["test-support"] } zlog.workspace = true - -[package.metadata.cargo-machete] -ignored = ["async-stripe"] diff --git a/crates/collab/k8s/collab.template.yml b/crates/collab/k8s/collab.template.yml index 45fc018a4afbd22180419742ab50197d13ac3a59..214b550ac20499b8b03cfafeefab9b45d51fcc24 100644 --- a/crates/collab/k8s/collab.template.yml +++ b/crates/collab/k8s/collab.template.yml @@ -219,12 +219,6 @@ spec: secretKeyRef: name: slack key: panics_webhook - - name: STRIPE_API_KEY - valueFrom: - secretKeyRef: - name: stripe - key: api_key - optional: true - name: COMPLETE_WITH_LANGUAGE_MODEL_RATE_LIMIT_PER_HOUR value: "1000" - name: SUPERMAVEN_ADMIN_API_KEY diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index 73d473ab767e633ae2cefc309d87074523811851..43581fd9421e5a8d10460a9ed15c565bd66a6e5e 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -116,6 +116,7 @@ CREATE TABLE "project_repositories" ( "scan_id" INTEGER NOT NULL, "is_deleted" BOOL NOT NULL, "current_merge_conflicts" VARCHAR, + "merge_message" VARCHAR, "branch_summary" VARCHAR, "head_commit_details" VARCHAR, PRIMARY KEY (project_id, id) @@ -474,67 +475,6 @@ CREATE UNIQUE INDEX "index_extensions_external_id" ON "extensions" ("external_id CREATE INDEX "index_extensions_total_download_count" ON "extensions" ("total_download_count"); -CREATE TABLE rate_buckets ( - user_id INT NOT NULL, - rate_limit_name VARCHAR(255) NOT NULL, - token_count INT NOT NULL, - last_refill TIMESTAMP WITHOUT TIME ZONE NOT NULL, - PRIMARY KEY (user_id, rate_limit_name), - FOREIGN KEY (user_id) REFERENCES users (id) -); - -CREATE INDEX idx_user_id_rate_limit ON rate_buckets (user_id, rate_limit_name); - -CREATE TABLE IF NOT EXISTS billing_preferences ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - user_id INTEGER NOT NULL REFERENCES users (id), - max_monthly_llm_usage_spending_in_cents INTEGER NOT NULL, - model_request_overages_enabled bool NOT NULL DEFAULT FALSE, - model_request_overages_spend_limit_in_cents integer NOT NULL DEFAULT 0 -); - -CREATE UNIQUE INDEX "uix_billing_preferences_on_user_id" ON billing_preferences (user_id); - -CREATE TABLE IF NOT EXISTS billing_customers ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - user_id INTEGER NOT NULL REFERENCES users (id), - has_overdue_invoices BOOLEAN NOT NULL DEFAULT FALSE, - stripe_customer_id TEXT NOT NULL, - trial_started_at TIMESTAMP -); - -CREATE UNIQUE INDEX "uix_billing_customers_on_user_id" ON billing_customers (user_id); - -CREATE UNIQUE INDEX "uix_billing_customers_on_stripe_customer_id" ON billing_customers (stripe_customer_id); - -CREATE TABLE IF NOT EXISTS billing_subscriptions ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - billing_customer_id INTEGER NOT NULL REFERENCES billing_customers (id), - stripe_subscription_id TEXT NOT NULL, - stripe_subscription_status TEXT NOT NULL, - stripe_cancel_at TIMESTAMP, - stripe_cancellation_reason TEXT, - kind TEXT, - stripe_current_period_start BIGINT, - stripe_current_period_end BIGINT -); - -CREATE INDEX "ix_billing_subscriptions_on_billing_customer_id" ON billing_subscriptions (billing_customer_id); - -CREATE UNIQUE INDEX 
"uix_billing_subscriptions_on_stripe_subscription_id" ON billing_subscriptions (stripe_subscription_id); - -CREATE TABLE IF NOT EXISTS processed_stripe_events ( - stripe_event_id TEXT PRIMARY KEY, - stripe_event_type TEXT NOT NULL, - stripe_event_created_timestamp INTEGER NOT NULL, - processed_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP -); - -CREATE INDEX "ix_processed_stripe_events_on_stripe_event_created_timestamp" ON processed_stripe_events (stripe_event_created_timestamp); - CREATE TABLE IF NOT EXISTS "breakpoints" ( "id" INTEGER PRIMARY KEY AUTOINCREMENT, "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE, diff --git a/crates/collab/migrations/20250816124707_make_admin_required_on_users.sql b/crates/collab/migrations/20250816124707_make_admin_required_on_users.sql new file mode 100644 index 0000000000000000000000000000000000000000..e372723d6d5f5e822a2e437cfac4b95bc2023998 --- /dev/null +++ b/crates/collab/migrations/20250816124707_make_admin_required_on_users.sql @@ -0,0 +1,2 @@ +alter table users +alter column admin set not null; diff --git a/crates/collab/migrations/20250816133027_add_orb_customer_id_to_billing_customers.sql b/crates/collab/migrations/20250816133027_add_orb_customer_id_to_billing_customers.sql new file mode 100644 index 0000000000000000000000000000000000000000..ea5e4de52a829413030bb5e206f5c7401381adcf --- /dev/null +++ b/crates/collab/migrations/20250816133027_add_orb_customer_id_to_billing_customers.sql @@ -0,0 +1,2 @@ +alter table billing_customers + add column orb_customer_id text; diff --git a/crates/collab/migrations/20250816135346_drop_rate_buckets_table.sql b/crates/collab/migrations/20250816135346_drop_rate_buckets_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..f51a33ed30d7fb88bc9dc6c82e7217c7e4634b28 --- /dev/null +++ b/crates/collab/migrations/20250816135346_drop_rate_buckets_table.sql @@ -0,0 +1 @@ +drop table rate_buckets; diff --git a/crates/collab/migrations/20250818192156_add_git_merge_message.sql b/crates/collab/migrations/20250818192156_add_git_merge_message.sql new file mode 100644 index 0000000000000000000000000000000000000000..335ea2f82493082e0e20d7762b5282696dc50224 --- /dev/null +++ b/crates/collab/migrations/20250818192156_add_git_merge_message.sql @@ -0,0 +1 @@ +ALTER TABLE "project_repositories" ADD COLUMN "merge_message" VARCHAR; diff --git a/crates/collab/migrations/20250819022421_add_orb_subscription_id_to_billing_subscriptions.sql b/crates/collab/migrations/20250819022421_add_orb_subscription_id_to_billing_subscriptions.sql new file mode 100644 index 0000000000000000000000000000000000000000..317f6a7653e3d1762f74e795a17d2f99b3831201 --- /dev/null +++ b/crates/collab/migrations/20250819022421_add_orb_subscription_id_to_billing_subscriptions.sql @@ -0,0 +1,2 @@ +alter table billing_subscriptions + add column orb_subscription_id text; diff --git a/crates/collab/migrations/20250819225916_make_stripe_fields_optional_on_billing_subscription.sql b/crates/collab/migrations/20250819225916_make_stripe_fields_optional_on_billing_subscription.sql new file mode 100644 index 0000000000000000000000000000000000000000..cf3b79da60be98da8dd78a2bcb01f7532be7fc59 --- /dev/null +++ b/crates/collab/migrations/20250819225916_make_stripe_fields_optional_on_billing_subscription.sql @@ -0,0 +1,3 @@ +alter table billing_subscriptions + alter column stripe_subscription_id drop not null, + alter column stripe_subscription_status drop not null; diff --git 
a/crates/collab/migrations/20250821133754_add_orb_subscription_status_and_period_to_billing_subscriptions.sql b/crates/collab/migrations/20250821133754_add_orb_subscription_status_and_period_to_billing_subscriptions.sql new file mode 100644 index 0000000000000000000000000000000000000000..89a42ab82bd97f487a426ef1fa0a08aa5b0c8396 --- /dev/null +++ b/crates/collab/migrations/20250821133754_add_orb_subscription_status_and_period_to_billing_subscriptions.sql @@ -0,0 +1,4 @@ +alter table billing_subscriptions + add column orb_subscription_status text, + add column orb_current_billing_period_start_date timestamp without time zone, + add column orb_current_billing_period_end_date timestamp without time zone; diff --git a/crates/collab/src/api.rs b/crates/collab/src/api.rs index 6cf3f68f54eda75ac19950c53cf535ff30a107a9..0cc7e2b2e93969ba7b8942838e4afcee251a20d9 100644 --- a/crates/collab/src/api.rs +++ b/crates/collab/src/api.rs @@ -1,19 +1,11 @@ -pub mod billing; pub mod contributors; pub mod events; pub mod extensions; pub mod ips_file; pub mod slack; -use crate::db::Database; -use crate::{ - AppState, Error, Result, auth, - db::{User, UserId}, - rpc, -}; -use ::rpc::proto; +use crate::{AppState, Error, Result, auth, db::UserId, rpc}; use anyhow::Context as _; -use axum::extract; use axum::{ Extension, Json, Router, body::Body, @@ -25,7 +17,6 @@ use axum::{ routing::{get, post}, }; use axum_extra::response::ErasedJson; -use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use std::sync::{Arc, OnceLock}; use tower::ServiceBuilder; @@ -100,10 +91,7 @@ impl std::fmt::Display for SystemIdHeader { pub fn routes(rpc_server: Arc) -> Router<(), Body> { Router::new() - .route("/users/look_up", get(look_up_user)) .route("/users/:id/access_tokens", post(create_access_token)) - .route("/users/:id/refresh_llm_tokens", post(refresh_llm_tokens)) - .route("/users/:id/update_plan", post(update_plan)) .route("/rpc_server_snapshot", get(get_rpc_server_snapshot)) .merge(contributors::router()) .layer( @@ -144,99 +132,6 @@ pub async fn validate_api_token(req: Request, next: Next) -> impl IntoR Ok::<_, Error>(next.run(req).await) } -#[derive(Debug, Deserialize)] -struct LookUpUserParams { - identifier: String, -} - -#[derive(Debug, Serialize)] -struct LookUpUserResponse { - user: Option, -} - -async fn look_up_user( - Query(params): Query, - Extension(app): Extension>, -) -> Result> { - let user = resolve_identifier_to_user(&app.db, ¶ms.identifier).await?; - let user = if let Some(user) = user { - match user { - UserOrId::User(user) => Some(user), - UserOrId::Id(id) => app.db.get_user_by_id(id).await?, - } - } else { - None - }; - - Ok(Json(LookUpUserResponse { user })) -} - -enum UserOrId { - User(User), - Id(UserId), -} - -async fn resolve_identifier_to_user( - db: &Arc, - identifier: &str, -) -> Result> { - if let Some(identifier) = identifier.parse::().ok() { - let user = db.get_user_by_id(UserId(identifier)).await?; - - return Ok(user.map(UserOrId::User)); - } - - if identifier.starts_with("cus_") { - let billing_customer = db - .get_billing_customer_by_stripe_customer_id(&identifier) - .await?; - - return Ok(billing_customer.map(|billing_customer| UserOrId::Id(billing_customer.user_id))); - } - - if identifier.starts_with("sub_") { - let billing_subscription = db - .get_billing_subscription_by_stripe_subscription_id(&identifier) - .await?; - - if let Some(billing_subscription) = billing_subscription { - let billing_customer = db - .get_billing_customer_by_id(billing_subscription.billing_customer_id) 
- .await?; - - return Ok( - billing_customer.map(|billing_customer| UserOrId::Id(billing_customer.user_id)) - ); - } else { - return Ok(None); - } - } - - if identifier.contains('@') { - let user = db.get_user_by_email(identifier).await?; - - return Ok(user.map(UserOrId::User)); - } - - if let Some(user) = db.get_user_by_github_login(identifier).await? { - return Ok(Some(UserOrId::User(user))); - } - - Ok(None) -} - -#[derive(Deserialize, Debug)] -struct CreateUserParams { - github_user_id: i32, - github_login: String, - email_address: String, - email_confirmation_code: Option, - #[serde(default)] - admin: bool, - #[serde(default)] - invite_count: i32, -} - async fn get_rpc_server_snapshot( Extension(rpc_server): Extension>, ) -> Result { @@ -295,90 +190,3 @@ async fn create_access_token( encrypted_access_token, })) } - -#[derive(Serialize)] -struct RefreshLlmTokensResponse {} - -async fn refresh_llm_tokens( - Path(user_id): Path, - Extension(rpc_server): Extension>, -) -> Result> { - rpc_server.refresh_llm_tokens_for_user(user_id).await; - - Ok(Json(RefreshLlmTokensResponse {})) -} - -#[derive(Debug, Serialize, Deserialize)] -struct UpdatePlanBody { - pub plan: cloud_llm_client::Plan, - pub subscription_period: SubscriptionPeriod, - pub usage: cloud_llm_client::CurrentUsage, - pub trial_started_at: Option>, - pub is_usage_based_billing_enabled: bool, - pub is_account_too_young: bool, - pub has_overdue_invoices: bool, -} - -#[derive(Debug, PartialEq, Clone, Copy, Serialize, Deserialize)] -struct SubscriptionPeriod { - pub started_at: DateTime, - pub ended_at: DateTime, -} - -#[derive(Serialize)] -struct UpdatePlanResponse {} - -async fn update_plan( - Path(user_id): Path, - Extension(rpc_server): Extension>, - extract::Json(body): extract::Json, -) -> Result> { - let plan = match body.plan { - cloud_llm_client::Plan::ZedFree => proto::Plan::Free, - cloud_llm_client::Plan::ZedPro => proto::Plan::ZedPro, - cloud_llm_client::Plan::ZedProTrial => proto::Plan::ZedProTrial, - }; - - let update_user_plan = proto::UpdateUserPlan { - plan: plan.into(), - trial_started_at: body - .trial_started_at - .map(|trial_started_at| trial_started_at.timestamp() as u64), - is_usage_based_billing_enabled: Some(body.is_usage_based_billing_enabled), - usage: Some(proto::SubscriptionUsage { - model_requests_usage_amount: body.usage.model_requests.used, - model_requests_usage_limit: Some(usage_limit_to_proto(body.usage.model_requests.limit)), - edit_predictions_usage_amount: body.usage.edit_predictions.used, - edit_predictions_usage_limit: Some(usage_limit_to_proto( - body.usage.edit_predictions.limit, - )), - }), - subscription_period: Some(proto::SubscriptionPeriod { - started_at: body.subscription_period.started_at.timestamp() as u64, - ended_at: body.subscription_period.ended_at.timestamp() as u64, - }), - account_too_young: Some(body.is_account_too_young), - has_overdue_invoices: Some(body.has_overdue_invoices), - }; - - rpc_server - .update_plan_for_user(user_id, update_user_plan) - .await?; - - Ok(Json(UpdatePlanResponse {})) -} - -fn usage_limit_to_proto(limit: cloud_llm_client::UsageLimit) -> proto::UsageLimit { - proto::UsageLimit { - variant: Some(match limit { - cloud_llm_client::UsageLimit::Limited(limit) => { - proto::usage_limit::Variant::Limited(proto::usage_limit::Limited { - limit: limit as u32, - }) - } - cloud_llm_client::UsageLimit::Unlimited => { - proto::usage_limit::Variant::Unlimited(proto::usage_limit::Unlimited {}) - } - }), - } -} diff --git a/crates/collab/src/api/billing.rs 
b/crates/collab/src/api/billing.rs deleted file mode 100644 index a0325d14c4a1b9f4221b17b446983b17f767fcbe..0000000000000000000000000000000000000000 --- a/crates/collab/src/api/billing.rs +++ /dev/null @@ -1,59 +0,0 @@ -use std::sync::Arc; -use stripe::SubscriptionStatus; - -use crate::AppState; -use crate::db::billing_subscription::StripeSubscriptionStatus; -use crate::db::{CreateBillingCustomerParams, billing_customer}; -use crate::stripe_client::{StripeClient, StripeCustomerId}; - -impl From for StripeSubscriptionStatus { - fn from(value: SubscriptionStatus) -> Self { - match value { - SubscriptionStatus::Incomplete => Self::Incomplete, - SubscriptionStatus::IncompleteExpired => Self::IncompleteExpired, - SubscriptionStatus::Trialing => Self::Trialing, - SubscriptionStatus::Active => Self::Active, - SubscriptionStatus::PastDue => Self::PastDue, - SubscriptionStatus::Canceled => Self::Canceled, - SubscriptionStatus::Unpaid => Self::Unpaid, - SubscriptionStatus::Paused => Self::Paused, - } - } -} - -/// Finds or creates a billing customer using the provided customer. -pub async fn find_or_create_billing_customer( - app: &Arc, - stripe_client: &dyn StripeClient, - customer_id: &StripeCustomerId, -) -> anyhow::Result> { - // If we already have a billing customer record associated with the Stripe customer, - // there's nothing more we need to do. - if let Some(billing_customer) = app - .db - .get_billing_customer_by_stripe_customer_id(customer_id.0.as_ref()) - .await? - { - return Ok(Some(billing_customer)); - } - - let customer = stripe_client.get_customer(customer_id).await?; - - let Some(email) = customer.email else { - return Ok(None); - }; - - let Some(user) = app.db.get_user_by_email(&email).await? else { - return Ok(None); - }; - - let billing_customer = app - .db - .create_billing_customer(&CreateBillingCustomerParams { - user_id: user.id, - stripe_customer_id: customer.id.to_string(), - }) - .await?; - - Ok(Some(billing_customer)) -} diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index 2f34a843a860d9d2933a4819788d0f9285473edf..da78a980693bec2243d872092a4f373698958b7a 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -149,35 +149,35 @@ pub async fn post_crash( "crash report" ); - if let Some(kinesis_client) = app.kinesis_client.clone() { - if let Some(stream) = app.config.kinesis_stream.clone() { - let properties = json!({ - "app_version": report.header.app_version, - "os_version": report.header.os_version, - "os_name": "macOS", - "bundle_id": report.header.bundle_id, - "incident_id": report.header.incident_id, - "installation_id": installation_id, - "description": description, - "backtrace": summary, - }); - let row = SnowflakeRow::new( - "Crash Reported", - None, - false, - Some(installation_id), - properties, - ); - let data = serde_json::to_vec(&row)?; - kinesis_client - .put_record() - .stream_name(stream) - .partition_key(row.insert_id.unwrap_or_default()) - .data(data.into()) - .send() - .await - .log_err(); - } + if let Some(kinesis_client) = app.kinesis_client.clone() + && let Some(stream) = app.config.kinesis_stream.clone() + { + let properties = json!({ + "app_version": report.header.app_version, + "os_version": report.header.os_version, + "os_name": "macOS", + "bundle_id": report.header.bundle_id, + "incident_id": report.header.incident_id, + "installation_id": installation_id, + "description": description, + "backtrace": summary, + }); + let row = SnowflakeRow::new( + "Crash Reported", + None, + false, + 
Some(installation_id), + properties, + ); + let data = serde_json::to_vec(&row)?; + kinesis_client + .put_record() + .stream_name(stream) + .partition_key(row.insert_id.unwrap_or_default()) + .data(data.into()) + .send() + .await + .log_err(); } if let Some(slack_panics_webhook) = app.config.slack_panics_webhook.clone() { @@ -280,7 +280,7 @@ pub async fn post_hang( service = "client", version = %report.app_version.unwrap_or_default().to_string(), os_name = %report.os_name, - os_version = report.os_version.unwrap_or_default().to_string(), + os_version = report.os_version.unwrap_or_default(), incident_id = %incident_id, installation_id = %report.installation_id.unwrap_or_default(), backtrace = %backtrace, @@ -359,34 +359,34 @@ pub async fn post_panic( "panic report" ); - if let Some(kinesis_client) = app.kinesis_client.clone() { - if let Some(stream) = app.config.kinesis_stream.clone() { - let properties = json!({ - "app_version": panic.app_version, - "os_name": panic.os_name, - "os_version": panic.os_version, - "incident_id": incident_id, - "installation_id": panic.installation_id, - "description": panic.payload, - "backtrace": backtrace, - }); - let row = SnowflakeRow::new( - "Panic Reported", - None, - false, - panic.installation_id.clone(), - properties, - ); - let data = serde_json::to_vec(&row)?; - kinesis_client - .put_record() - .stream_name(stream) - .partition_key(row.insert_id.unwrap_or_default()) - .data(data.into()) - .send() - .await - .log_err(); - } + if let Some(kinesis_client) = app.kinesis_client.clone() + && let Some(stream) = app.config.kinesis_stream.clone() + { + let properties = json!({ + "app_version": panic.app_version, + "os_name": panic.os_name, + "os_version": panic.os_version, + "incident_id": incident_id, + "installation_id": panic.installation_id, + "description": panic.payload, + "backtrace": backtrace, + }); + let row = SnowflakeRow::new( + "Panic Reported", + None, + false, + panic.installation_id.clone(), + properties, + ); + let data = serde_json::to_vec(&row)?; + kinesis_client + .put_record() + .stream_name(stream) + .partition_key(row.insert_id.unwrap_or_default()) + .data(data.into()) + .send() + .await + .log_err(); } if !report_to_slack(&panic) { @@ -518,31 +518,31 @@ pub async fn post_events( let first_event_at = chrono::Utc::now() - chrono::Duration::milliseconds(last_event.milliseconds_since_first_event); - if let Some(kinesis_client) = app.kinesis_client.clone() { - if let Some(stream) = app.config.kinesis_stream.clone() { - let mut request = kinesis_client.put_records().stream_name(stream); - let mut has_records = false; - for row in for_snowflake( - request_body.clone(), - first_event_at, - country_code.clone(), - checksum_matched, - ) { - if let Some(data) = serde_json::to_vec(&row).log_err() { - request = request.records( - aws_sdk_kinesis::types::PutRecordsRequestEntry::builder() - .partition_key(request_body.system_id.clone().unwrap_or_default()) - .data(data.into()) - .build() - .unwrap(), - ); - has_records = true; - } - } - if has_records { - request.send().await.log_err(); + if let Some(kinesis_client) = app.kinesis_client.clone() + && let Some(stream) = app.config.kinesis_stream.clone() + { + let mut request = kinesis_client.put_records().stream_name(stream); + let mut has_records = false; + for row in for_snowflake( + request_body.clone(), + first_event_at, + country_code.clone(), + checksum_matched, + ) { + if let Some(data) = serde_json::to_vec(&row).log_err() { + request = request.records( + 
aws_sdk_kinesis::types::PutRecordsRequestEntry::builder() + .partition_key(request_body.system_id.clone().unwrap_or_default()) + .data(data.into()) + .build() + .unwrap(), + ); + has_records = true; } } + if has_records { + request.send().await.log_err(); + } }; Ok(()) @@ -564,170 +564,10 @@ fn for_snowflake( country_code: Option, checksum_matched: bool, ) -> impl Iterator { - body.events.into_iter().filter_map(move |event| { + body.events.into_iter().map(move |event| { let timestamp = first_event_at + Duration::milliseconds(event.milliseconds_since_first_event); - // We will need to double check, but I believe all of the events that - // are being transformed here are now migrated over to use the - // telemetry::event! macro, as of this commit so this code can go away - // when we feel enough users have upgraded past this point. let (event_type, mut event_properties) = match &event.event { - Event::Editor(e) => ( - match e.operation.as_str() { - "open" => "Editor Opened".to_string(), - "save" => "Editor Saved".to_string(), - _ => format!("Unknown Editor Event: {}", e.operation), - }, - serde_json::to_value(e).unwrap(), - ), - Event::EditPrediction(e) => ( - format!( - "Edit Prediction {}", - if e.suggestion_accepted { - "Accepted" - } else { - "Discarded" - } - ), - serde_json::to_value(e).unwrap(), - ), - Event::EditPredictionRating(e) => ( - "Edit Prediction Rated".to_string(), - serde_json::to_value(e).unwrap(), - ), - Event::Call(e) => { - let event_type = match e.operation.trim() { - "unshare project" => "Project Unshared".to_string(), - "open channel notes" => "Channel Notes Opened".to_string(), - "share project" => "Project Shared".to_string(), - "join channel" => "Channel Joined".to_string(), - "hang up" => "Call Ended".to_string(), - "accept incoming" => "Incoming Call Accepted".to_string(), - "invite" => "Participant Invited".to_string(), - "disable microphone" => "Microphone Disabled".to_string(), - "enable microphone" => "Microphone Enabled".to_string(), - "enable screen share" => "Screen Share Enabled".to_string(), - "disable screen share" => "Screen Share Disabled".to_string(), - "decline incoming" => "Incoming Call Declined".to_string(), - _ => format!("Unknown Call Event: {}", e.operation), - }; - - (event_type, serde_json::to_value(e).unwrap()) - } - Event::Assistant(e) => ( - match e.phase { - telemetry_events::AssistantPhase::Response => "Assistant Responded".to_string(), - telemetry_events::AssistantPhase::Invoked => "Assistant Invoked".to_string(), - telemetry_events::AssistantPhase::Accepted => { - "Assistant Response Accepted".to_string() - } - telemetry_events::AssistantPhase::Rejected => { - "Assistant Response Rejected".to_string() - } - }, - serde_json::to_value(e).unwrap(), - ), - Event::Cpu(_) | Event::Memory(_) => return None, - Event::App(e) => { - let mut properties = json!({}); - let event_type = match e.operation.trim() { - // App - "open" => "App Opened".to_string(), - "first open" => "App First Opened".to_string(), - "first open for release channel" => { - "App First Opened For Release Channel".to_string() - } - "close" => "App Closed".to_string(), - - // Project - "open project" => "Project Opened".to_string(), - "open node project" => { - properties["project_type"] = json!("node"); - "Project Opened".to_string() - } - "open pnpm project" => { - properties["project_type"] = json!("pnpm"); - "Project Opened".to_string() - } - "open yarn project" => { - properties["project_type"] = json!("yarn"); - "Project Opened".to_string() - } - - // SSH - "create 
ssh server" => "SSH Server Created".to_string(), - "create ssh project" => "SSH Project Created".to_string(), - "open ssh project" => "SSH Project Opened".to_string(), - - // Welcome Page - "welcome page: change keymap" => "Welcome Keymap Changed".to_string(), - "welcome page: change theme" => "Welcome Theme Changed".to_string(), - "welcome page: close" => "Welcome Page Closed".to_string(), - "welcome page: edit settings" => "Welcome Settings Edited".to_string(), - "welcome page: install cli" => "Welcome CLI Installed".to_string(), - "welcome page: open" => "Welcome Page Opened".to_string(), - "welcome page: open extensions" => "Welcome Extensions Page Opened".to_string(), - "welcome page: sign in to copilot" => "Welcome Copilot Signed In".to_string(), - "welcome page: toggle diagnostic telemetry" => { - "Welcome Diagnostic Telemetry Toggled".to_string() - } - "welcome page: toggle metric telemetry" => { - "Welcome Metric Telemetry Toggled".to_string() - } - "welcome page: toggle vim" => "Welcome Vim Mode Toggled".to_string(), - "welcome page: view docs" => "Welcome Documentation Viewed".to_string(), - - // Extensions - "extensions page: open" => "Extensions Page Opened".to_string(), - "extensions: install extension" => "Extension Installed".to_string(), - "extensions: uninstall extension" => "Extension Uninstalled".to_string(), - - // Misc - "markdown preview: open" => "Markdown Preview Opened".to_string(), - "project diagnostics: open" => "Project Diagnostics Opened".to_string(), - "project search: open" => "Project Search Opened".to_string(), - "repl sessions: open" => "REPL Session Started".to_string(), - - // Feature Upsell - "feature upsell: toggle vim" => { - properties["source"] = json!("Feature Upsell"); - "Vim Mode Toggled".to_string() - } - _ => e - .operation - .strip_prefix("feature upsell: viewed docs (") - .and_then(|s| s.strip_suffix(')')) - .map_or_else( - || format!("Unknown App Event: {}", e.operation), - |docs_url| { - properties["url"] = json!(docs_url); - properties["source"] = json!("Feature Upsell"); - "Documentation Viewed".to_string() - }, - ), - }; - (event_type, properties) - } - Event::Setting(e) => ( - "Settings Changed".to_string(), - serde_json::to_value(e).unwrap(), - ), - Event::Extension(e) => ( - "Extension Loaded".to_string(), - serde_json::to_value(e).unwrap(), - ), - Event::Edit(e) => ( - "Editor Edited".to_string(), - serde_json::to_value(e).unwrap(), - ), - Event::Action(e) => ( - "Action Invoked".to_string(), - serde_json::to_value(e).unwrap(), - ), - Event::Repl(e) => ( - "Kernel Status Changed".to_string(), - serde_json::to_value(e).unwrap(), - ), Event::Flexible(e) => ( e.event_type.clone(), serde_json::to_value(&e.event_properties).unwrap(), @@ -759,7 +599,7 @@ fn for_snowflake( }) }); - Some(SnowflakeRow { + SnowflakeRow { time: timestamp, user_id: body.metrics_id.clone(), device_id: body.system_id.clone(), @@ -767,7 +607,7 @@ fn for_snowflake( event_properties, user_properties, insert_id: Some(Uuid::new_v4().to_string()), - }) + } }) } diff --git a/crates/collab/src/api/extensions.rs b/crates/collab/src/api/extensions.rs index 9170c39e472d33420fc889972e4c96e44914db15..1ace433db298be7ffd159128b54b194395ba4fe5 100644 --- a/crates/collab/src/api/extensions.rs +++ b/crates/collab/src/api/extensions.rs @@ -337,8 +337,7 @@ async fn fetch_extensions_from_blob_store( if known_versions .binary_search_by_key(&published_version, |known_version| known_version) .is_err() - { - if let Some(extension) = fetch_extension_manifest( + && let Some(extension) = 
fetch_extension_manifest( blob_store_client, blob_store_bucket, extension_id, @@ -346,12 +345,11 @@ async fn fetch_extensions_from_blob_store( ) .await .log_err() - { - new_versions - .entry(extension_id) - .or_default() - .push(extension); - } + { + new_versions + .entry(extension_id) + .or_default() + .push(extension); } } } diff --git a/crates/collab/src/auth.rs b/crates/collab/src/auth.rs index 00f37c675874ce200cf79f2e8763450f4494fc79..e484d6b510f444e764ac38210d6a5cfc42142807 100644 --- a/crates/collab/src/auth.rs +++ b/crates/collab/src/auth.rs @@ -79,27 +79,27 @@ pub async fn validate_header(mut req: Request, next: Next) -> impl Into verify_access_token(access_token, user_id, &state.db).await }; - if let Ok(validate_result) = validate_result { - if validate_result.is_valid { - let user = state + if let Ok(validate_result) = validate_result + && validate_result.is_valid + { + let user = state + .db + .get_user_by_id(user_id) + .await? + .with_context(|| format!("user {user_id} not found"))?; + + if let Some(impersonator_id) = validate_result.impersonator_id { + let admin = state .db - .get_user_by_id(user_id) + .get_user_by_id(impersonator_id) .await? - .with_context(|| format!("user {user_id} not found"))?; - - if let Some(impersonator_id) = validate_result.impersonator_id { - let admin = state - .db - .get_user_by_id(impersonator_id) - .await? - .with_context(|| format!("user {impersonator_id} not found"))?; - req.extensions_mut() - .insert(Principal::Impersonated { user, admin }); - } else { - req.extensions_mut().insert(Principal::User(user)); - }; - return Ok::<_, Error>(next.run(req).await); - } + .with_context(|| format!("user {impersonator_id} not found"))?; + req.extensions_mut() + .insert(Principal::Impersonated { user, admin }); + } else { + req.extensions_mut().insert(Principal::User(user)); + }; + return Ok::<_, Error>(next.run(req).await); } Err(Error::http( @@ -236,7 +236,7 @@ mod test { #[gpui::test] async fn test_verify_access_token(cx: &mut gpui::TestAppContext) { - let test_db = crate::db::TestDb::sqlite(cx.executor().clone()); + let test_db = crate::db::TestDb::sqlite(cx.executor()); let db = test_db.db(); let user = db diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 2c22ca206945eb02752680b6149d7796643ee938..95a485305ca31bde351f4962d1678e30801d8a01 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -41,12 +41,7 @@ use worktree_settings_file::LocalSettingsKind; pub use tests::TestDb; pub use ids::*; -pub use queries::billing_customers::{CreateBillingCustomerParams, UpdateBillingCustomerParams}; -pub use queries::billing_subscriptions::{ - CreateBillingSubscriptionParams, UpdateBillingSubscriptionParams, -}; pub use queries::contributors::ContributorSelector; -pub use queries::processed_stripe_events::CreateProcessedStripeEventParams; pub use sea_orm::ConnectOptions; pub use tables::user::Model as User; pub use tables::*; @@ -690,7 +685,7 @@ impl LocalSettingsKind { } } - pub fn to_proto(&self) -> proto::LocalSettingsKind { + pub fn to_proto(self) -> proto::LocalSettingsKind { match self { Self::Settings => proto::LocalSettingsKind::Settings, Self::Tasks => proto::LocalSettingsKind::Tasks, diff --git a/crates/collab/src/db/ids.rs b/crates/collab/src/db/ids.rs index 2ba7ec10514d8b0bbf4b26eab0b9384b3911204e..8f116cfd633749b21ff197a723f9e779a750b561 100644 --- a/crates/collab/src/db/ids.rs +++ b/crates/collab/src/db/ids.rs @@ -70,9 +70,6 @@ macro_rules! 
id_type { } id_type!(AccessTokenId); -id_type!(BillingCustomerId); -id_type!(BillingSubscriptionId); -id_type!(BillingPreferencesId); id_type!(BufferId); id_type!(ChannelBufferCollaboratorId); id_type!(ChannelChatParticipantId); diff --git a/crates/collab/src/db/queries.rs b/crates/collab/src/db/queries.rs index 64b627e47518e0c18eedd8c625a0c98b678a96cc..95e45dc00451dae27a98fd68c492f1047dea9804 100644 --- a/crates/collab/src/db/queries.rs +++ b/crates/collab/src/db/queries.rs @@ -1,9 +1,6 @@ use super::*; pub mod access_tokens; -pub mod billing_customers; -pub mod billing_preferences; -pub mod billing_subscriptions; pub mod buffers; pub mod channels; pub mod contacts; @@ -12,7 +9,6 @@ pub mod embeddings; pub mod extensions; pub mod messages; pub mod notifications; -pub mod processed_stripe_events; pub mod projects; pub mod rooms; pub mod servers; diff --git a/crates/collab/src/db/queries/billing_customers.rs b/crates/collab/src/db/queries/billing_customers.rs deleted file mode 100644 index ead9e6cd32dc4e52a5c0e2438e9e8ff97735a255..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/queries/billing_customers.rs +++ /dev/null @@ -1,100 +0,0 @@ -use super::*; - -#[derive(Debug)] -pub struct CreateBillingCustomerParams { - pub user_id: UserId, - pub stripe_customer_id: String, -} - -#[derive(Debug, Default)] -pub struct UpdateBillingCustomerParams { - pub user_id: ActiveValue, - pub stripe_customer_id: ActiveValue, - pub has_overdue_invoices: ActiveValue, - pub trial_started_at: ActiveValue>, -} - -impl Database { - /// Creates a new billing customer. - pub async fn create_billing_customer( - &self, - params: &CreateBillingCustomerParams, - ) -> Result { - self.transaction(|tx| async move { - let customer = billing_customer::Entity::insert(billing_customer::ActiveModel { - user_id: ActiveValue::set(params.user_id), - stripe_customer_id: ActiveValue::set(params.stripe_customer_id.clone()), - ..Default::default() - }) - .exec_with_returning(&*tx) - .await?; - - Ok(customer) - }) - .await - } - - /// Updates the specified billing customer. - pub async fn update_billing_customer( - &self, - id: BillingCustomerId, - params: &UpdateBillingCustomerParams, - ) -> Result<()> { - self.transaction(|tx| async move { - billing_customer::Entity::update(billing_customer::ActiveModel { - id: ActiveValue::set(id), - user_id: params.user_id.clone(), - stripe_customer_id: params.stripe_customer_id.clone(), - has_overdue_invoices: params.has_overdue_invoices.clone(), - trial_started_at: params.trial_started_at.clone(), - created_at: ActiveValue::not_set(), - }) - .exec(&*tx) - .await?; - - Ok(()) - }) - .await - } - - pub async fn get_billing_customer_by_id( - &self, - id: BillingCustomerId, - ) -> Result> { - self.transaction(|tx| async move { - Ok(billing_customer::Entity::find() - .filter(billing_customer::Column::Id.eq(id)) - .one(&*tx) - .await?) - }) - .await - } - - /// Returns the billing customer for the user with the specified ID. - pub async fn get_billing_customer_by_user_id( - &self, - user_id: UserId, - ) -> Result> { - self.transaction(|tx| async move { - Ok(billing_customer::Entity::find() - .filter(billing_customer::Column::UserId.eq(user_id)) - .one(&*tx) - .await?) - }) - .await - } - - /// Returns the billing customer for the user with the specified Stripe customer ID. 
- pub async fn get_billing_customer_by_stripe_customer_id( - &self, - stripe_customer_id: &str, - ) -> Result> { - self.transaction(|tx| async move { - Ok(billing_customer::Entity::find() - .filter(billing_customer::Column::StripeCustomerId.eq(stripe_customer_id)) - .one(&*tx) - .await?) - }) - .await - } -} diff --git a/crates/collab/src/db/queries/billing_preferences.rs b/crates/collab/src/db/queries/billing_preferences.rs deleted file mode 100644 index f370964ecd7d5c762c88e5fb572fde84ce81935d..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/queries/billing_preferences.rs +++ /dev/null @@ -1,17 +0,0 @@ -use super::*; - -impl Database { - /// Returns the billing preferences for the given user, if they exist. - pub async fn get_billing_preferences( - &self, - user_id: UserId, - ) -> Result> { - self.transaction(|tx| async move { - Ok(billing_preference::Entity::find() - .filter(billing_preference::Column::UserId.eq(user_id)) - .one(&*tx) - .await?) - }) - .await - } -} diff --git a/crates/collab/src/db/queries/billing_subscriptions.rs b/crates/collab/src/db/queries/billing_subscriptions.rs deleted file mode 100644 index 8361d6b4d07f8e6b59f9c7b39b18057e6f62b3c0..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/queries/billing_subscriptions.rs +++ /dev/null @@ -1,158 +0,0 @@ -use anyhow::Context as _; - -use crate::db::billing_subscription::{ - StripeCancellationReason, StripeSubscriptionStatus, SubscriptionKind, -}; - -use super::*; - -#[derive(Debug)] -pub struct CreateBillingSubscriptionParams { - pub billing_customer_id: BillingCustomerId, - pub kind: Option, - pub stripe_subscription_id: String, - pub stripe_subscription_status: StripeSubscriptionStatus, - pub stripe_cancellation_reason: Option, - pub stripe_current_period_start: Option, - pub stripe_current_period_end: Option, -} - -#[derive(Debug, Default)] -pub struct UpdateBillingSubscriptionParams { - pub billing_customer_id: ActiveValue, - pub kind: ActiveValue>, - pub stripe_subscription_id: ActiveValue, - pub stripe_subscription_status: ActiveValue, - pub stripe_cancel_at: ActiveValue>, - pub stripe_cancellation_reason: ActiveValue>, - pub stripe_current_period_start: ActiveValue>, - pub stripe_current_period_end: ActiveValue>, -} - -impl Database { - /// Creates a new billing subscription. - pub async fn create_billing_subscription( - &self, - params: &CreateBillingSubscriptionParams, - ) -> Result { - self.transaction(|tx| async move { - let id = billing_subscription::Entity::insert(billing_subscription::ActiveModel { - billing_customer_id: ActiveValue::set(params.billing_customer_id), - kind: ActiveValue::set(params.kind), - stripe_subscription_id: ActiveValue::set(params.stripe_subscription_id.clone()), - stripe_subscription_status: ActiveValue::set(params.stripe_subscription_status), - stripe_cancellation_reason: ActiveValue::set(params.stripe_cancellation_reason), - stripe_current_period_start: ActiveValue::set(params.stripe_current_period_start), - stripe_current_period_end: ActiveValue::set(params.stripe_current_period_end), - ..Default::default() - }) - .exec(&*tx) - .await? - .last_insert_id; - - Ok(billing_subscription::Entity::find_by_id(id) - .one(&*tx) - .await? - .context("failed to retrieve inserted billing subscription")?) - }) - .await - } - - /// Updates the specified billing subscription. 
- pub async fn update_billing_subscription( - &self, - id: BillingSubscriptionId, - params: &UpdateBillingSubscriptionParams, - ) -> Result<()> { - self.transaction(|tx| async move { - billing_subscription::Entity::update(billing_subscription::ActiveModel { - id: ActiveValue::set(id), - billing_customer_id: params.billing_customer_id.clone(), - kind: params.kind.clone(), - stripe_subscription_id: params.stripe_subscription_id.clone(), - stripe_subscription_status: params.stripe_subscription_status.clone(), - stripe_cancel_at: params.stripe_cancel_at.clone(), - stripe_cancellation_reason: params.stripe_cancellation_reason.clone(), - stripe_current_period_start: params.stripe_current_period_start.clone(), - stripe_current_period_end: params.stripe_current_period_end.clone(), - created_at: ActiveValue::not_set(), - }) - .exec(&*tx) - .await?; - - Ok(()) - }) - .await - } - - /// Returns the billing subscription with the specified Stripe subscription ID. - pub async fn get_billing_subscription_by_stripe_subscription_id( - &self, - stripe_subscription_id: &str, - ) -> Result> { - self.transaction(|tx| async move { - Ok(billing_subscription::Entity::find() - .filter( - billing_subscription::Column::StripeSubscriptionId.eq(stripe_subscription_id), - ) - .one(&*tx) - .await?) - }) - .await - } - - pub async fn get_active_billing_subscription( - &self, - user_id: UserId, - ) -> Result> { - self.transaction(|tx| async move { - Ok(billing_subscription::Entity::find() - .inner_join(billing_customer::Entity) - .filter(billing_customer::Column::UserId.eq(user_id)) - .filter( - Condition::all() - .add( - Condition::any() - .add( - billing_subscription::Column::StripeSubscriptionStatus - .eq(StripeSubscriptionStatus::Active), - ) - .add( - billing_subscription::Column::StripeSubscriptionStatus - .eq(StripeSubscriptionStatus::Trialing), - ), - ) - .add(billing_subscription::Column::Kind.is_not_null()), - ) - .one(&*tx) - .await?) - }) - .await - } - - /// Returns whether the user has an active billing subscription. - pub async fn has_active_billing_subscription(&self, user_id: UserId) -> Result { - Ok(self.count_active_billing_subscriptions(user_id).await? > 0) - } - - /// Returns the count of the active billing subscriptions for the user with the specified ID. 
- pub async fn count_active_billing_subscriptions(&self, user_id: UserId) -> Result { - self.transaction(|tx| async move { - let count = billing_subscription::Entity::find() - .inner_join(billing_customer::Entity) - .filter( - billing_customer::Column::UserId.eq(user_id).and( - billing_subscription::Column::StripeSubscriptionStatus - .eq(StripeSubscriptionStatus::Active) - .or(billing_subscription::Column::StripeSubscriptionStatus - .eq(StripeSubscriptionStatus::Trialing)), - ), - ) - .count(&*tx) - .await?; - - Ok(count as usize) - }) - .await - } -} diff --git a/crates/collab/src/db/queries/extensions.rs b/crates/collab/src/db/queries/extensions.rs index 7d8aad2be4bd3581cbdbe3dc3a1dfbc935f81966..f218ff28507cf51a72cd0aa00a044ad75f64f839 100644 --- a/crates/collab/src/db/queries/extensions.rs +++ b/crates/collab/src/db/queries/extensions.rs @@ -87,10 +87,10 @@ impl Database { continue; }; - if let Some((_, max_extension_version)) = &max_versions.get(&version.extension_id) { - if max_extension_version > &extension_version { - continue; - } + if let Some((_, max_extension_version)) = &max_versions.get(&version.extension_id) + && max_extension_version > &extension_version + { + continue; } if let Some(constraints) = constraints { @@ -331,10 +331,10 @@ impl Database { .exec_without_returning(&*tx) .await?; - if let Ok(db_version) = semver::Version::parse(&extension.latest_version) { - if db_version >= latest_version.version { - continue; - } + if let Ok(db_version) = semver::Version::parse(&extension.latest_version) + && db_version >= latest_version.version + { + continue; } let mut extension = extension.into_active_model(); diff --git a/crates/collab/src/db/queries/processed_stripe_events.rs b/crates/collab/src/db/queries/processed_stripe_events.rs deleted file mode 100644 index f14ad480e09fb4c0d6d43569b03e7888e9929cf4..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/queries/processed_stripe_events.rs +++ /dev/null @@ -1,69 +0,0 @@ -use super::*; - -#[derive(Debug)] -pub struct CreateProcessedStripeEventParams { - pub stripe_event_id: String, - pub stripe_event_type: String, - pub stripe_event_created_timestamp: i64, -} - -impl Database { - /// Creates a new processed Stripe event. - pub async fn create_processed_stripe_event( - &self, - params: &CreateProcessedStripeEventParams, - ) -> Result<()> { - self.transaction(|tx| async move { - processed_stripe_event::Entity::insert(processed_stripe_event::ActiveModel { - stripe_event_id: ActiveValue::set(params.stripe_event_id.clone()), - stripe_event_type: ActiveValue::set(params.stripe_event_type.clone()), - stripe_event_created_timestamp: ActiveValue::set( - params.stripe_event_created_timestamp, - ), - ..Default::default() - }) - .exec_without_returning(&*tx) - .await?; - - Ok(()) - }) - .await - } - - /// Returns the processed Stripe event with the specified event ID. - pub async fn get_processed_stripe_event_by_event_id( - &self, - event_id: &str, - ) -> Result> { - self.transaction(|tx| async move { - Ok(processed_stripe_event::Entity::find_by_id(event_id) - .one(&*tx) - .await?) - }) - .await - } - - /// Returns the processed Stripe events with the specified event IDs. - pub async fn get_processed_stripe_events_by_event_ids( - &self, - event_ids: &[&str], - ) -> Result> { - self.transaction(|tx| async move { - Ok(processed_stripe_event::Entity::find() - .filter( - processed_stripe_event::Column::StripeEventId.is_in(event_ids.iter().copied()), - ) - .all(&*tx) - .await?) 
- }) - .await - } - - /// Returns whether the Stripe event with the specified ID has already been processed. - pub async fn already_processed_stripe_event(&self, event_id: &str) -> Result { - Ok(self - .get_processed_stripe_event_by_event_id(event_id) - .await? - .is_some()) - } -} diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index 82f74d910ba0d12c1473719189e066eb9d0307eb..393f2c80f8e733aa2d2b3b5f4b811c9868e0a620 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -349,11 +349,11 @@ impl Database { serde_json::to_string(&repository.current_merge_conflicts) .unwrap(), )), - - // Old clients do not use abs path, entry ids or head_commit_details. + // Old clients do not use abs path, entry ids, head_commit_details, or merge_message. abs_path: ActiveValue::set(String::new()), entry_ids: ActiveValue::set("[]".into()), head_commit_details: ActiveValue::set(None), + merge_message: ActiveValue::set(None), } }), ) @@ -502,6 +502,7 @@ impl Database { current_merge_conflicts: ActiveValue::Set(Some( serde_json::to_string(&update.current_merge_conflicts).unwrap(), )), + merge_message: ActiveValue::set(update.merge_message.clone()), }) .on_conflict( OnConflict::columns([ @@ -515,6 +516,7 @@ impl Database { project_repository::Column::AbsPath, project_repository::Column::CurrentMergeConflicts, project_repository::Column::HeadCommitDetails, + project_repository::Column::MergeMessage, ]) .to_owned(), ) @@ -943,21 +945,21 @@ impl Database { let current_merge_conflicts = db_repository_entry .current_merge_conflicts .as_ref() - .map(|conflicts| serde_json::from_str(&conflicts)) + .map(|conflicts| serde_json::from_str(conflicts)) .transpose()? .unwrap_or_default(); let branch_summary = db_repository_entry .branch_summary .as_ref() - .map(|branch_summary| serde_json::from_str(&branch_summary)) + .map(|branch_summary| serde_json::from_str(branch_summary)) .transpose()? .unwrap_or_default(); let head_commit_details = db_repository_entry .head_commit_details .as_ref() - .map(|head_commit_details| serde_json::from_str(&head_commit_details)) + .map(|head_commit_details| serde_json::from_str(head_commit_details)) .transpose()? .unwrap_or_default(); @@ -990,6 +992,7 @@ impl Database { head_commit_details, scan_id: db_repository_entry.scan_id as u64, is_last_update: true, + merge_message: db_repository_entry.merge_message, }); } } @@ -1318,10 +1321,10 @@ impl Database { .await?; let mut connection_ids = HashSet::default(); - if let Some(host_connection) = project.host_connection().log_err() { - if !exclude_dev_server { - connection_ids.insert(host_connection); - } + if let Some(host_connection) = project.host_connection().log_err() + && !exclude_dev_server + { + connection_ids.insert(host_connection); } while let Some(collaborator) = collaborators.next().await { diff --git a/crates/collab/src/db/queries/rooms.rs b/crates/collab/src/db/queries/rooms.rs index c63d7133be2ec616a95fa73359a5050c289501bf..9e7cabf9b29c91d7e486f42d5e6b12020b0f514e 100644 --- a/crates/collab/src/db/queries/rooms.rs +++ b/crates/collab/src/db/queries/rooms.rs @@ -746,21 +746,21 @@ impl Database { let current_merge_conflicts = db_repository .current_merge_conflicts .as_ref() - .map(|conflicts| serde_json::from_str(&conflicts)) + .map(|conflicts| serde_json::from_str(conflicts)) .transpose()? 
.unwrap_or_default(); let branch_summary = db_repository .branch_summary .as_ref() - .map(|branch_summary| serde_json::from_str(&branch_summary)) + .map(|branch_summary| serde_json::from_str(branch_summary)) .transpose()? .unwrap_or_default(); let head_commit_details = db_repository .head_commit_details .as_ref() - .map(|head_commit_details| serde_json::from_str(&head_commit_details)) + .map(|head_commit_details| serde_json::from_str(head_commit_details)) .transpose()? .unwrap_or_default(); @@ -793,6 +793,7 @@ impl Database { abs_path: db_repository.abs_path, scan_id: db_repository.scan_id as u64, is_last_update: true, + merge_message: db_repository.merge_message, }); } } diff --git a/crates/collab/src/db/tables.rs b/crates/collab/src/db/tables.rs index d87ab174bd7a70be7ad57fd1871853018fc25763..0082a9fb030a27e4be13af725f08ea9c82217377 100644 --- a/crates/collab/src/db/tables.rs +++ b/crates/collab/src/db/tables.rs @@ -1,7 +1,4 @@ pub mod access_token; -pub mod billing_customer; -pub mod billing_preference; -pub mod billing_subscription; pub mod buffer; pub mod buffer_operation; pub mod buffer_snapshot; @@ -23,7 +20,6 @@ pub mod notification; pub mod notification_kind; pub mod observed_buffer_edits; pub mod observed_channel_messages; -pub mod processed_stripe_event; pub mod project; pub mod project_collaborator; pub mod project_repository; diff --git a/crates/collab/src/db/tables/billing_customer.rs b/crates/collab/src/db/tables/billing_customer.rs deleted file mode 100644 index e7d4a216e348a74b0cc79a308626fc1a80c508f6..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/tables/billing_customer.rs +++ /dev/null @@ -1,41 +0,0 @@ -use crate::db::{BillingCustomerId, UserId}; -use sea_orm::entity::prelude::*; - -/// A billing customer. -#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)] -#[sea_orm(table_name = "billing_customers")] -pub struct Model { - #[sea_orm(primary_key)] - pub id: BillingCustomerId, - pub user_id: UserId, - pub stripe_customer_id: String, - pub has_overdue_invoices: bool, - pub trial_started_at: Option, - pub created_at: DateTime, -} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation { - #[sea_orm( - belongs_to = "super::user::Entity", - from = "Column::UserId", - to = "super::user::Column::Id" - )] - User, - #[sea_orm(has_many = "super::billing_subscription::Entity")] - BillingSubscription, -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::User.def() - } -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::BillingSubscription.def() - } -} - -impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/billing_preference.rs b/crates/collab/src/db/tables/billing_preference.rs deleted file mode 100644 index c1888d3b2f9c954f0b9bcd38376f191ed383b973..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/tables/billing_preference.rs +++ /dev/null @@ -1,32 +0,0 @@ -use crate::db::{BillingPreferencesId, UserId}; -use sea_orm::entity::prelude::*; - -#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)] -#[sea_orm(table_name = "billing_preferences")] -pub struct Model { - #[sea_orm(primary_key)] - pub id: BillingPreferencesId, - pub created_at: DateTime, - pub user_id: UserId, - pub max_monthly_llm_usage_spending_in_cents: i32, - pub model_request_overages_enabled: bool, - pub model_request_overages_spend_limit_in_cents: i32, -} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation { - #[sea_orm( - belongs_to = 
"super::user::Entity", - from = "Column::UserId", - to = "super::user::Column::Id" - )] - User, -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::User.def() - } -} - -impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/billing_subscription.rs b/crates/collab/src/db/tables/billing_subscription.rs deleted file mode 100644 index 522973dbc970b69947b8e790e370bfc9fa93aa99..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/tables/billing_subscription.rs +++ /dev/null @@ -1,176 +0,0 @@ -use crate::db::{BillingCustomerId, BillingSubscriptionId}; -use crate::stripe_client; -use chrono::{Datelike as _, NaiveDate, Utc}; -use sea_orm::entity::prelude::*; -use serde::Serialize; - -/// A billing subscription. -#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)] -#[sea_orm(table_name = "billing_subscriptions")] -pub struct Model { - #[sea_orm(primary_key)] - pub id: BillingSubscriptionId, - pub billing_customer_id: BillingCustomerId, - pub kind: Option, - pub stripe_subscription_id: String, - pub stripe_subscription_status: StripeSubscriptionStatus, - pub stripe_cancel_at: Option, - pub stripe_cancellation_reason: Option, - pub stripe_current_period_start: Option, - pub stripe_current_period_end: Option, - pub created_at: DateTime, -} - -impl Model { - pub fn current_period_start_at(&self) -> Option { - let period_start = self.stripe_current_period_start?; - chrono::DateTime::from_timestamp(period_start, 0) - } - - pub fn current_period_end_at(&self) -> Option { - let period_end = self.stripe_current_period_end?; - chrono::DateTime::from_timestamp(period_end, 0) - } - - pub fn current_period( - subscription: Option, - is_staff: bool, - ) -> Option<(DateTimeUtc, DateTimeUtc)> { - if is_staff { - let now = Utc::now(); - let year = now.year(); - let month = now.month(); - - let first_day_of_this_month = - NaiveDate::from_ymd_opt(year, month, 1)?.and_hms_opt(0, 0, 0)?; - - let next_month = if month == 12 { 1 } else { month + 1 }; - let next_month_year = if month == 12 { year + 1 } else { year }; - let first_day_of_next_month = - NaiveDate::from_ymd_opt(next_month_year, next_month, 1)?.and_hms_opt(23, 59, 59)?; - - let last_day_of_this_month = first_day_of_next_month - chrono::Days::new(1); - - Some(( - first_day_of_this_month.and_utc(), - last_day_of_this_month.and_utc(), - )) - } else { - let subscription = subscription?; - let period_start_at = subscription.current_period_start_at()?; - let period_end_at = subscription.current_period_end_at()?; - - Some((period_start_at, period_end_at)) - } - } -} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation { - #[sea_orm( - belongs_to = "super::billing_customer::Entity", - from = "Column::BillingCustomerId", - to = "super::billing_customer::Column::Id" - )] - BillingCustomer, -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::BillingCustomer.def() - } -} - -impl ActiveModelBehavior for ActiveModel {} - -#[derive(Eq, PartialEq, Copy, Clone, Debug, EnumIter, DeriveActiveEnum, Hash, Serialize)] -#[sea_orm(rs_type = "String", db_type = "String(StringLen::None)")] -#[serde(rename_all = "snake_case")] -pub enum SubscriptionKind { - #[sea_orm(string_value = "zed_pro")] - ZedPro, - #[sea_orm(string_value = "zed_pro_trial")] - ZedProTrial, - #[sea_orm(string_value = "zed_free")] - ZedFree, -} - -impl From for cloud_llm_client::Plan { - fn from(value: SubscriptionKind) -> Self { - match value { - SubscriptionKind::ZedPro => Self::ZedPro, - 
SubscriptionKind::ZedProTrial => Self::ZedProTrial, - SubscriptionKind::ZedFree => Self::ZedFree, - } - } -} - -/// The status of a Stripe subscription. -/// -/// [Stripe docs](https://docs.stripe.com/api/subscriptions/object#subscription_object-status) -#[derive( - Eq, PartialEq, Copy, Clone, Debug, EnumIter, DeriveActiveEnum, Default, Hash, Serialize, -)] -#[sea_orm(rs_type = "String", db_type = "String(StringLen::None)")] -#[serde(rename_all = "snake_case")] -pub enum StripeSubscriptionStatus { - #[default] - #[sea_orm(string_value = "incomplete")] - Incomplete, - #[sea_orm(string_value = "incomplete_expired")] - IncompleteExpired, - #[sea_orm(string_value = "trialing")] - Trialing, - #[sea_orm(string_value = "active")] - Active, - #[sea_orm(string_value = "past_due")] - PastDue, - #[sea_orm(string_value = "canceled")] - Canceled, - #[sea_orm(string_value = "unpaid")] - Unpaid, - #[sea_orm(string_value = "paused")] - Paused, -} - -impl StripeSubscriptionStatus { - pub fn is_cancelable(&self) -> bool { - match self { - Self::Trialing | Self::Active | Self::PastDue => true, - Self::Incomplete - | Self::IncompleteExpired - | Self::Canceled - | Self::Unpaid - | Self::Paused => false, - } - } -} - -/// The cancellation reason for a Stripe subscription. -/// -/// [Stripe docs](https://docs.stripe.com/api/subscriptions/object#subscription_object-cancellation_details-reason) -#[derive(Eq, PartialEq, Copy, Clone, Debug, EnumIter, DeriveActiveEnum, Hash, Serialize)] -#[sea_orm(rs_type = "String", db_type = "String(StringLen::None)")] -#[serde(rename_all = "snake_case")] -pub enum StripeCancellationReason { - #[sea_orm(string_value = "cancellation_requested")] - CancellationRequested, - #[sea_orm(string_value = "payment_disputed")] - PaymentDisputed, - #[sea_orm(string_value = "payment_failed")] - PaymentFailed, -} - -impl From for StripeCancellationReason { - fn from(value: stripe_client::StripeCancellationDetailsReason) -> Self { - match value { - stripe_client::StripeCancellationDetailsReason::CancellationRequested => { - Self::CancellationRequested - } - stripe_client::StripeCancellationDetailsReason::PaymentDisputed => { - Self::PaymentDisputed - } - stripe_client::StripeCancellationDetailsReason::PaymentFailed => Self::PaymentFailed, - } - } -} diff --git a/crates/collab/src/db/tables/processed_stripe_event.rs b/crates/collab/src/db/tables/processed_stripe_event.rs deleted file mode 100644 index 7b6f0cdc31d951caee57dc45c357178d375af9c8..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/tables/processed_stripe_event.rs +++ /dev/null @@ -1,16 +0,0 @@ -use sea_orm::entity::prelude::*; - -#[derive(Clone, Debug, PartialEq, DeriveEntityModel)] -#[sea_orm(table_name = "processed_stripe_events")] -pub struct Model { - #[sea_orm(primary_key)] - pub stripe_event_id: String, - pub stripe_event_type: String, - pub stripe_event_created_timestamp: i64, - pub processed_at: DateTime, -} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation {} - -impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/project_repository.rs b/crates/collab/src/db/tables/project_repository.rs index 665e87cd1fe8c492170a8459dbf7ac6c086f9e00..eb653ecee37d48ce79e26450eb85d87dec411c1e 100644 --- a/crates/collab/src/db/tables/project_repository.rs +++ b/crates/collab/src/db/tables/project_repository.rs @@ -16,6 +16,8 @@ pub struct Model { pub is_deleted: bool, // JSON array typed string pub current_merge_conflicts: Option, + // The suggested merge commit message 
+ pub merge_message: Option<String>, // A JSON object representing the current Branch values pub branch_summary: Option<String>, // A JSON object representing the current Head commit values diff --git a/crates/collab/src/db/tables/user.rs b/crates/collab/src/db/tables/user.rs index 49fe3eb58f3ee149d9cfee88fd9c4b175854373b..af43fe300a6cc1224487541ca72af9d887a6fae3 100644 --- a/crates/collab/src/db/tables/user.rs +++ b/crates/collab/src/db/tables/user.rs @@ -29,8 +29,6 @@ pub struct Model { pub enum Relation { #[sea_orm(has_many = "super::access_token::Entity")] AccessToken, - #[sea_orm(has_one = "super::billing_customer::Entity")] - BillingCustomer, #[sea_orm(has_one = "super::room_participant::Entity")] RoomParticipant, #[sea_orm(has_many = "super::project::Entity")] @@ -68,12 +66,6 @@ impl Related for Entity { } } -impl Related<super::billing_customer::Entity> for Entity { - fn to() -> RelationDef { - Relation::BillingCustomer.def() - } -} - impl Related<super::room_participant::Entity> for Entity { fn to() -> RelationDef { Relation::RoomParticipant.def() diff --git a/crates/collab/src/db/tests.rs b/crates/collab/src/db/tests.rs index 6c2f9dc82a88c159df1111d01a213259ab3a6c76..2eb8d377acaba9f8fe5ea558a29cc028c2aa11fd 100644 --- a/crates/collab/src/db/tests.rs +++ b/crates/collab/src/db/tests.rs @@ -8,7 +8,6 @@ mod embedding_tests; mod extension_tests; mod feature_flag_tests; mod message_tests; -mod processed_stripe_event_tests; mod user_tests; use crate::migrations::run_database_migrations; diff --git a/crates/collab/src/db/tests/embedding_tests.rs b/crates/collab/src/db/tests/embedding_tests.rs index 367e89f87bff827fe321b0935d52647a9034794a..5d8d69c0304d3a16b55e9d7b1477fe62cc22024a 100644 --- a/crates/collab/src/db/tests/embedding_tests.rs +++ b/crates/collab/src/db/tests/embedding_tests.rs @@ -8,7 +8,7 @@ use time::{Duration, OffsetDateTime, PrimitiveDateTime}; // SQLite does not support array arguments, so we only test this against a real postgres instance #[gpui::test] async fn test_get_embeddings_postgres(cx: &mut gpui::TestAppContext) { - let test_db = TestDb::postgres(cx.executor().clone()); + let test_db = TestDb::postgres(cx.executor()); let db = test_db.db(); let provider = "test_model"; @@ -38,7 +38,7 @@ async fn test_get_embeddings_postgres(cx: &mut gpui::TestAppContext) { #[gpui::test] async fn test_purge_old_embeddings(cx: &mut gpui::TestAppContext) { - let test_db = TestDb::postgres(cx.executor().clone()); + let test_db = TestDb::postgres(cx.executor()); let db = test_db.db(); let model = "test_model"; diff --git a/crates/collab/src/db/tests/processed_stripe_event_tests.rs b/crates/collab/src/db/tests/processed_stripe_event_tests.rs deleted file mode 100644 index ad93b5a6589dd3a413bd5738dfc2e7debb9228d0..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/tests/processed_stripe_event_tests.rs +++ /dev/null @@ -1,38 +0,0 @@ -use std::sync::Arc; - -use crate::test_both_dbs; - -use super::{CreateProcessedStripeEventParams, Database}; - -test_both_dbs!( - test_already_processed_stripe_event, - test_already_processed_stripe_event_postgres, - test_already_processed_stripe_event_sqlite -); - -async fn test_already_processed_stripe_event(db: &Arc<Database>) { - let unprocessed_event_id = "evt_1PiJOuRxOf7d5PNaw2zzWiyO".to_string(); - let processed_event_id = "evt_1PiIfMRxOf7d5PNakHrAUe8P".to_string(); - - db.create_processed_stripe_event(&CreateProcessedStripeEventParams { - stripe_event_id: processed_event_id.clone(), - stripe_event_type: "customer.created".into(), - stripe_event_created_timestamp: 1722355968, - }) - .await - .unwrap(); - - assert!( 
db.already_processed_stripe_event(&processed_event_id) - .await - .unwrap(), - "Expected {processed_event_id} to already be processed" - ); - - assert!( - !db.already_processed_stripe_event(&unprocessed_event_id) - .await - .unwrap(), - "Expected {unprocessed_event_id} to be unprocessed" - ); -} diff --git a/crates/collab/src/lib.rs b/crates/collab/src/lib.rs index 905859ca6996c3593e1f13fbcb0e723531595ff6..191025df3770db78df3a12bc16d5c8f32d54571c 100644 --- a/crates/collab/src/lib.rs +++ b/crates/collab/src/lib.rs @@ -7,8 +7,6 @@ pub mod llm; pub mod migrations; pub mod rpc; pub mod seed; -pub mod stripe_billing; -pub mod stripe_client; pub mod user_backfiller; #[cfg(test)] @@ -22,21 +20,16 @@ use axum::{ }; use db::{ChannelId, Database}; use executor::Executor; -use llm::db::LlmDatabase; use serde::Deserialize; use std::{path::PathBuf, sync::Arc}; use util::ResultExt; -use crate::stripe_billing::StripeBilling; -use crate::stripe_client::{RealStripeClient, StripeClient}; - pub type Result = std::result::Result; pub enum Error { Http(StatusCode, String, HeaderMap), Database(sea_orm::error::DbErr), Internal(anyhow::Error), - Stripe(stripe::StripeError), } impl From for Error { @@ -51,12 +44,6 @@ impl From for Error { } } -impl From for Error { - fn from(error: stripe::StripeError) -> Self { - Self::Stripe(error) - } -} - impl From for Error { fn from(error: axum::Error) -> Self { Self::Internal(error.into()) @@ -104,14 +91,6 @@ impl IntoResponse for Error { ); (StatusCode::INTERNAL_SERVER_ERROR, format!("{}", &error)).into_response() } - Error::Stripe(error) => { - log::error!( - "HTTP error {}: {:?}", - StatusCode::INTERNAL_SERVER_ERROR, - &error - ); - (StatusCode::INTERNAL_SERVER_ERROR, format!("{}", &error)).into_response() - } } } } @@ -122,7 +101,6 @@ impl std::fmt::Debug for Error { Error::Http(code, message, _headers) => (code, message).fmt(f), Error::Database(error) => error.fmt(f), Error::Internal(error) => error.fmt(f), - Error::Stripe(error) => error.fmt(f), } } } @@ -133,7 +111,6 @@ impl std::fmt::Display for Error { Error::Http(code, message, _) => write!(f, "{code}: {message}"), Error::Database(error) => error.fmt(f), Error::Internal(error) => error.fmt(f), - Error::Stripe(error) => error.fmt(f), } } } @@ -179,7 +156,6 @@ pub struct Config { pub zed_client_checksum_seed: Option, pub slack_panics_webhook: Option, pub auto_join_channel_id: Option, - pub stripe_api_key: Option, pub supermaven_admin_api_key: Option>, pub user_backfiller_github_access_token: Option>, } @@ -234,7 +210,6 @@ impl Config { auto_join_channel_id: None, migrations_path: None, seed_path: None, - stripe_api_key: None, supermaven_admin_api_key: None, user_backfiller_github_access_token: None, kinesis_region: None, @@ -266,14 +241,8 @@ impl ServiceMode { pub struct AppState { pub db: Arc, - pub llm_db: Option>, pub livekit_client: Option>, pub blob_store_client: Option, - /// This is a real instance of the Stripe client; we're working to replace references to this with the - /// [`StripeClient`] trait. 
- pub real_stripe_client: Option>, - pub stripe_client: Option>, - pub stripe_billing: Option>, pub executor: Executor, pub kinesis_client: Option<::aws_sdk_kinesis::Client>, pub config: Config, @@ -286,20 +255,6 @@ impl AppState { let mut db = Database::new(db_options).await?; db.initialize_notification_kinds().await?; - let llm_db = if let Some((llm_database_url, llm_database_max_connections)) = config - .llm_database_url - .clone() - .zip(config.llm_database_max_connections) - { - let mut llm_db_options = db::ConnectOptions::new(llm_database_url); - llm_db_options.max_connections(llm_database_max_connections); - let mut llm_db = LlmDatabase::new(llm_db_options, executor.clone()).await?; - llm_db.initialize().await?; - Some(Arc::new(llm_db)) - } else { - None - }; - let livekit_client = if let Some(((server, key), secret)) = config .livekit_server .as_ref() @@ -316,18 +271,10 @@ impl AppState { }; let db = Arc::new(db); - let stripe_client = build_stripe_client(&config).map(Arc::new).log_err(); let this = Self { db: db.clone(), - llm_db, livekit_client, blob_store_client: build_blob_store_client(&config).await.log_err(), - stripe_billing: stripe_client - .clone() - .map(|stripe_client| Arc::new(StripeBilling::new(stripe_client))), - real_stripe_client: stripe_client.clone(), - stripe_client: stripe_client - .map(|stripe_client| Arc::new(RealStripeClient::new(stripe_client)) as _), executor, kinesis_client: if config.kinesis_access_key.is_some() { build_kinesis_client(&config).await.log_err() @@ -340,14 +287,6 @@ impl AppState { } } -fn build_stripe_client(config: &Config) -> anyhow::Result { - let api_key = config - .stripe_api_key - .as_ref() - .context("missing stripe_api_key")?; - Ok(stripe::Client::new(api_key)) -} - async fn build_blob_store_client(config: &Config) -> anyhow::Result { let keys = aws_sdk_s3::config::Credentials::new( config diff --git a/crates/collab/src/llm.rs b/crates/collab/src/llm.rs index de74858168fd94ab677cee03f721a1e3fbbdfd46..dec10232bdb000acef9def25cad519ceb213956b 100644 --- a/crates/collab/src/llm.rs +++ b/crates/collab/src/llm.rs @@ -1,12 +1 @@ pub mod db; -mod token; - -pub use token::*; - -pub const AGENT_EXTENDED_TRIAL_FEATURE_FLAG: &str = "agent-extended-trial"; - -/// The name of the feature flag that bypasses the account age check. -pub const BYPASS_ACCOUNT_AGE_CHECK_FEATURE_FLAG: &str = "bypass-account-age-check"; - -/// The minimum account age an account must have in order to use the LLM service. 
-pub const MIN_ACCOUNT_AGE_FOR_LLM_USE: chrono::Duration = chrono::Duration::days(30); diff --git a/crates/collab/src/llm/db.rs b/crates/collab/src/llm/db.rs index 18ad624dab840c47df766a55c2f59cf9a17c55e6..b15d5a42b5f183831b34552beba3f616d3a7c3f0 100644 --- a/crates/collab/src/llm/db.rs +++ b/crates/collab/src/llm/db.rs @@ -1,30 +1,9 @@ -mod ids; -mod queries; -mod seed; -mod tables; - -#[cfg(test)] -mod tests; - -use cloud_llm_client::LanguageModelProvider; -use collections::HashMap; -pub use ids::*; -pub use seed::*; -pub use tables::*; - -#[cfg(test)] -pub use tests::TestLlmDb; -use usage_measure::UsageMeasure; - use std::future::Future; use std::sync::Arc; use anyhow::Context; pub use sea_orm::ConnectOptions; -use sea_orm::prelude::*; -use sea_orm::{ - ActiveValue, DatabaseConnection, DatabaseTransaction, IsolationLevel, TransactionTrait, -}; +use sea_orm::{DatabaseConnection, DatabaseTransaction, IsolationLevel, TransactionTrait}; use crate::Result; use crate::db::TransactionHandle; @@ -36,9 +15,6 @@ pub struct LlmDatabase { pool: DatabaseConnection, #[allow(unused)] executor: Executor, - provider_ids: HashMap, - models: HashMap<(LanguageModelProvider, String), model::Model>, - usage_measure_ids: HashMap, #[cfg(test)] runtime: Option, } @@ -51,59 +27,11 @@ impl LlmDatabase { options: options.clone(), pool: sea_orm::Database::connect(options).await?, executor, - provider_ids: HashMap::default(), - models: HashMap::default(), - usage_measure_ids: HashMap::default(), #[cfg(test)] runtime: None, }) } - pub async fn initialize(&mut self) -> Result<()> { - self.initialize_providers().await?; - self.initialize_models().await?; - self.initialize_usage_measures().await?; - Ok(()) - } - - /// Returns the list of all known models, with their [`LanguageModelProvider`]. - pub fn all_models(&self) -> Vec<(LanguageModelProvider, model::Model)> { - self.models - .iter() - .map(|((model_provider, _model_name), model)| (*model_provider, model.clone())) - .collect::>() - } - - /// Returns the names of the known models for the given [`LanguageModelProvider`]. - pub fn model_names_for_provider(&self, provider: LanguageModelProvider) -> Vec { - self.models - .keys() - .filter_map(|(model_provider, model_name)| { - if model_provider == &provider { - Some(model_name) - } else { - None - } - }) - .cloned() - .collect::>() - } - - pub fn model(&self, provider: LanguageModelProvider, name: &str) -> Result<&model::Model> { - Ok(self - .models - .get(&(provider, name.to_string())) - .with_context(|| format!("unknown model {provider:?}:{name}"))?) - } - - pub fn model_by_id(&self, id: ModelId) -> Result<&model::Model> { - Ok(self - .models - .values() - .find(|model| model.id == id) - .with_context(|| format!("no model for ID {id:?}"))?) 
- } - pub fn options(&self) -> &ConnectOptions { &self.options } diff --git a/crates/collab/src/llm/db/ids.rs b/crates/collab/src/llm/db/ids.rs deleted file mode 100644 index 03cab6cee0b9e7a07f2d4d43aa7e556615e34494..0000000000000000000000000000000000000000 --- a/crates/collab/src/llm/db/ids.rs +++ /dev/null @@ -1,11 +0,0 @@ -use sea_orm::{DbErr, entity::prelude::*}; -use serde::{Deserialize, Serialize}; - -use crate::id_type; - -id_type!(BillingEventId); -id_type!(ModelId); -id_type!(ProviderId); -id_type!(RevokedAccessTokenId); -id_type!(UsageId); -id_type!(UsageMeasureId); diff --git a/crates/collab/src/llm/db/queries.rs b/crates/collab/src/llm/db/queries.rs deleted file mode 100644 index 0087218b3ff9fe81850870bc8022bd81fe0ee48d..0000000000000000000000000000000000000000 --- a/crates/collab/src/llm/db/queries.rs +++ /dev/null @@ -1,5 +0,0 @@ -use super::*; - -pub mod providers; -pub mod subscription_usages; -pub mod usages; diff --git a/crates/collab/src/llm/db/queries/providers.rs b/crates/collab/src/llm/db/queries/providers.rs deleted file mode 100644 index 9c7dbdd1847ea1d087582ffd959497bc41757b75..0000000000000000000000000000000000000000 --- a/crates/collab/src/llm/db/queries/providers.rs +++ /dev/null @@ -1,134 +0,0 @@ -use super::*; -use sea_orm::{QueryOrder, sea_query::OnConflict}; -use std::str::FromStr; -use strum::IntoEnumIterator as _; - -pub struct ModelParams { - pub provider: LanguageModelProvider, - pub name: String, - pub max_requests_per_minute: i64, - pub max_tokens_per_minute: i64, - pub max_tokens_per_day: i64, - pub price_per_million_input_tokens: i32, - pub price_per_million_output_tokens: i32, -} - -impl LlmDatabase { - pub async fn initialize_providers(&mut self) -> Result<()> { - self.provider_ids = self - .transaction(|tx| async move { - let existing_providers = provider::Entity::find().all(&*tx).await?; - - let mut new_providers = LanguageModelProvider::iter() - .filter(|provider| { - !existing_providers - .iter() - .any(|p| p.name == provider.to_string()) - }) - .map(|provider| provider::ActiveModel { - name: ActiveValue::set(provider.to_string()), - ..Default::default() - }) - .peekable(); - - if new_providers.peek().is_some() { - provider::Entity::insert_many(new_providers) - .exec(&*tx) - .await?; - } - - let all_providers: HashMap<_, _> = provider::Entity::find() - .all(&*tx) - .await? - .iter() - .filter_map(|provider| { - LanguageModelProvider::from_str(&provider.name) - .ok() - .map(|p| (p, provider.id)) - }) - .collect(); - - Ok(all_providers) - }) - .await?; - Ok(()) - } - - pub async fn initialize_models(&mut self) -> Result<()> { - let all_provider_ids = &self.provider_ids; - self.models = self - .transaction(|tx| async move { - let all_models: HashMap<_, _> = model::Entity::find() - .all(&*tx) - .await? 
- .into_iter() - .filter_map(|model| { - let provider = all_provider_ids.iter().find_map(|(provider, id)| { - if *id == model.provider_id { - Some(provider) - } else { - None - } - })?; - Some(((*provider, model.name.clone()), model)) - }) - .collect(); - Ok(all_models) - }) - .await?; - Ok(()) - } - - pub async fn insert_models(&mut self, models: &[ModelParams]) -> Result<()> { - let all_provider_ids = &self.provider_ids; - self.transaction(|tx| async move { - model::Entity::insert_many(models.iter().map(|model_params| { - let provider_id = all_provider_ids[&model_params.provider]; - model::ActiveModel { - provider_id: ActiveValue::set(provider_id), - name: ActiveValue::set(model_params.name.clone()), - max_requests_per_minute: ActiveValue::set(model_params.max_requests_per_minute), - max_tokens_per_minute: ActiveValue::set(model_params.max_tokens_per_minute), - max_tokens_per_day: ActiveValue::set(model_params.max_tokens_per_day), - price_per_million_input_tokens: ActiveValue::set( - model_params.price_per_million_input_tokens, - ), - price_per_million_output_tokens: ActiveValue::set( - model_params.price_per_million_output_tokens, - ), - ..Default::default() - } - })) - .on_conflict( - OnConflict::columns([model::Column::ProviderId, model::Column::Name]) - .update_columns([ - model::Column::MaxRequestsPerMinute, - model::Column::MaxTokensPerMinute, - model::Column::MaxTokensPerDay, - model::Column::PricePerMillionInputTokens, - model::Column::PricePerMillionOutputTokens, - ]) - .to_owned(), - ) - .exec_without_returning(&*tx) - .await?; - Ok(()) - }) - .await?; - self.initialize_models().await - } - - /// Returns the list of LLM providers. - pub async fn list_providers(&self) -> Result> { - self.transaction(|tx| async move { - Ok(provider::Entity::find() - .order_by_asc(provider::Column::Name) - .all(&*tx) - .await? - .into_iter() - .filter_map(|p| LanguageModelProvider::from_str(&p.name).ok()) - .collect()) - }) - .await - } -} diff --git a/crates/collab/src/llm/db/queries/subscription_usages.rs b/crates/collab/src/llm/db/queries/subscription_usages.rs deleted file mode 100644 index 8a519790753099be62868e94e8b068958095d320..0000000000000000000000000000000000000000 --- a/crates/collab/src/llm/db/queries/subscription_usages.rs +++ /dev/null @@ -1,38 +0,0 @@ -use crate::db::UserId; - -use super::*; - -impl LlmDatabase { - pub async fn get_subscription_usage_for_period( - &self, - user_id: UserId, - period_start_at: DateTimeUtc, - period_end_at: DateTimeUtc, - ) -> Result> { - self.transaction(|tx| async move { - self.get_subscription_usage_for_period_in_tx( - user_id, - period_start_at, - period_end_at, - &tx, - ) - .await - }) - .await - } - - async fn get_subscription_usage_for_period_in_tx( - &self, - user_id: UserId, - period_start_at: DateTimeUtc, - period_end_at: DateTimeUtc, - tx: &DatabaseTransaction, - ) -> Result> { - Ok(subscription_usage::Entity::find() - .filter(subscription_usage::Column::UserId.eq(user_id)) - .filter(subscription_usage::Column::PeriodStartAt.eq(period_start_at)) - .filter(subscription_usage::Column::PeriodEndAt.eq(period_end_at)) - .one(tx) - .await?) 
- } -} diff --git a/crates/collab/src/llm/db/queries/usages.rs b/crates/collab/src/llm/db/queries/usages.rs deleted file mode 100644 index a917703f960e657f3ebe345a59558525c7aaa4bb..0000000000000000000000000000000000000000 --- a/crates/collab/src/llm/db/queries/usages.rs +++ /dev/null @@ -1,44 +0,0 @@ -use std::str::FromStr; -use strum::IntoEnumIterator as _; - -use super::*; - -impl LlmDatabase { - pub async fn initialize_usage_measures(&mut self) -> Result<()> { - let all_measures = self - .transaction(|tx| async move { - let existing_measures = usage_measure::Entity::find().all(&*tx).await?; - - let new_measures = UsageMeasure::iter() - .filter(|measure| { - !existing_measures - .iter() - .any(|m| m.name == measure.to_string()) - }) - .map(|measure| usage_measure::ActiveModel { - name: ActiveValue::set(measure.to_string()), - ..Default::default() - }) - .collect::>(); - - if !new_measures.is_empty() { - usage_measure::Entity::insert_many(new_measures) - .exec(&*tx) - .await?; - } - - Ok(usage_measure::Entity::find().all(&*tx).await?) - }) - .await?; - - self.usage_measure_ids = all_measures - .into_iter() - .filter_map(|measure| { - UsageMeasure::from_str(&measure.name) - .ok() - .map(|um| (um, measure.id)) - }) - .collect(); - Ok(()) - } -} diff --git a/crates/collab/src/llm/db/seed.rs b/crates/collab/src/llm/db/seed.rs deleted file mode 100644 index 55c6c30cd5d8bf3c6755c3f9b9faaa6fc689370e..0000000000000000000000000000000000000000 --- a/crates/collab/src/llm/db/seed.rs +++ /dev/null @@ -1,45 +0,0 @@ -use super::*; -use crate::{Config, Result}; -use queries::providers::ModelParams; - -pub async fn seed_database(_config: &Config, db: &mut LlmDatabase, _force: bool) -> Result<()> { - db.insert_models(&[ - ModelParams { - provider: LanguageModelProvider::Anthropic, - name: "claude-3-5-sonnet".into(), - max_requests_per_minute: 5, - max_tokens_per_minute: 20_000, - max_tokens_per_day: 300_000, - price_per_million_input_tokens: 300, // $3.00/MTok - price_per_million_output_tokens: 1500, // $15.00/MTok - }, - ModelParams { - provider: LanguageModelProvider::Anthropic, - name: "claude-3-opus".into(), - max_requests_per_minute: 5, - max_tokens_per_minute: 10_000, - max_tokens_per_day: 300_000, - price_per_million_input_tokens: 1500, // $15.00/MTok - price_per_million_output_tokens: 7500, // $75.00/MTok - }, - ModelParams { - provider: LanguageModelProvider::Anthropic, - name: "claude-3-sonnet".into(), - max_requests_per_minute: 5, - max_tokens_per_minute: 20_000, - max_tokens_per_day: 300_000, - price_per_million_input_tokens: 1500, // $15.00/MTok - price_per_million_output_tokens: 7500, // $75.00/MTok - }, - ModelParams { - provider: LanguageModelProvider::Anthropic, - name: "claude-3-haiku".into(), - max_requests_per_minute: 5, - max_tokens_per_minute: 25_000, - max_tokens_per_day: 300_000, - price_per_million_input_tokens: 25, // $0.25/MTok - price_per_million_output_tokens: 125, // $1.25/MTok - }, - ]) - .await -} diff --git a/crates/collab/src/llm/db/tables.rs b/crates/collab/src/llm/db/tables.rs deleted file mode 100644 index 75ea8f51409ec28ec546db5a360b935ef04fb7f9..0000000000000000000000000000000000000000 --- a/crates/collab/src/llm/db/tables.rs +++ /dev/null @@ -1,6 +0,0 @@ -pub mod model; -pub mod provider; -pub mod subscription_usage; -pub mod subscription_usage_meter; -pub mod usage; -pub mod usage_measure; diff --git a/crates/collab/src/llm/db/tables/model.rs b/crates/collab/src/llm/db/tables/model.rs deleted file mode 100644 index 
f0a858b4a681ce930f9e8d57f5289950a5476ef1..0000000000000000000000000000000000000000 --- a/crates/collab/src/llm/db/tables/model.rs +++ /dev/null @@ -1,48 +0,0 @@ -use sea_orm::entity::prelude::*; - -use crate::llm::db::{ModelId, ProviderId}; - -/// An LLM model. -#[derive(Clone, Debug, PartialEq, DeriveEntityModel)] -#[sea_orm(table_name = "models")] -pub struct Model { - #[sea_orm(primary_key)] - pub id: ModelId, - pub provider_id: ProviderId, - pub name: String, - pub max_requests_per_minute: i64, - pub max_tokens_per_minute: i64, - pub max_input_tokens_per_minute: i64, - pub max_output_tokens_per_minute: i64, - pub max_tokens_per_day: i64, - pub price_per_million_input_tokens: i32, - pub price_per_million_cache_creation_input_tokens: i32, - pub price_per_million_cache_read_input_tokens: i32, - pub price_per_million_output_tokens: i32, -} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation { - #[sea_orm( - belongs_to = "super::provider::Entity", - from = "Column::ProviderId", - to = "super::provider::Column::Id" - )] - Provider, - #[sea_orm(has_many = "super::usage::Entity")] - Usages, -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::Provider.def() - } -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::Usages.def() - } -} - -impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/llm/db/tables/provider.rs b/crates/collab/src/llm/db/tables/provider.rs deleted file mode 100644 index 90838f7c65511e83cd7192676e0dafefdd05896a..0000000000000000000000000000000000000000 --- a/crates/collab/src/llm/db/tables/provider.rs +++ /dev/null @@ -1,25 +0,0 @@ -use crate::llm::db::ProviderId; -use sea_orm::entity::prelude::*; - -/// An LLM provider. -#[derive(Clone, Debug, PartialEq, DeriveEntityModel)] -#[sea_orm(table_name = "providers")] -pub struct Model { - #[sea_orm(primary_key)] - pub id: ProviderId, - pub name: String, -} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation { - #[sea_orm(has_many = "super::model::Entity")] - Models, -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::Models.def() - } -} - -impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/llm/db/tables/subscription_usage.rs b/crates/collab/src/llm/db/tables/subscription_usage.rs deleted file mode 100644 index dd93b03d051ef9752b1c777d24205085fca4487e..0000000000000000000000000000000000000000 --- a/crates/collab/src/llm/db/tables/subscription_usage.rs +++ /dev/null @@ -1,22 +0,0 @@ -use crate::db::UserId; -use crate::db::billing_subscription::SubscriptionKind; -use sea_orm::entity::prelude::*; -use time::PrimitiveDateTime; - -#[derive(Clone, Debug, PartialEq, DeriveEntityModel)] -#[sea_orm(table_name = "subscription_usages_v2")] -pub struct Model { - #[sea_orm(primary_key)] - pub id: Uuid, - pub user_id: UserId, - pub period_start_at: PrimitiveDateTime, - pub period_end_at: PrimitiveDateTime, - pub plan: SubscriptionKind, - pub model_requests: i32, - pub edit_predictions: i32, -} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation {} - -impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/llm/db/tables/subscription_usage_meter.rs b/crates/collab/src/llm/db/tables/subscription_usage_meter.rs deleted file mode 100644 index c082cf3bc132aa4df2c3c7b5422a0c53ec235579..0000000000000000000000000000000000000000 --- a/crates/collab/src/llm/db/tables/subscription_usage_meter.rs +++ /dev/null @@ -1,55 +0,0 @@ -use sea_orm::entity::prelude::*; 
-use serde::Serialize; - -use crate::llm::db::ModelId; - -#[derive(Clone, Debug, PartialEq, DeriveEntityModel)] -#[sea_orm(table_name = "subscription_usage_meters_v2")] -pub struct Model { - #[sea_orm(primary_key)] - pub id: Uuid, - pub subscription_usage_id: Uuid, - pub model_id: ModelId, - pub mode: CompletionMode, - pub requests: i32, -} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation { - #[sea_orm( - belongs_to = "super::subscription_usage::Entity", - from = "Column::SubscriptionUsageId", - to = "super::subscription_usage::Column::Id" - )] - SubscriptionUsage, - #[sea_orm( - belongs_to = "super::model::Entity", - from = "Column::ModelId", - to = "super::model::Column::Id" - )] - Model, -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::SubscriptionUsage.def() - } -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::Model.def() - } -} - -impl ActiveModelBehavior for ActiveModel {} - -#[derive(Eq, PartialEq, Copy, Clone, Debug, EnumIter, DeriveActiveEnum, Hash, Serialize)] -#[sea_orm(rs_type = "String", db_type = "String(StringLen::None)")] -#[serde(rename_all = "snake_case")] -pub enum CompletionMode { - #[sea_orm(string_value = "normal")] - Normal, - #[sea_orm(string_value = "max")] - Max, -} diff --git a/crates/collab/src/llm/db/tables/usage.rs b/crates/collab/src/llm/db/tables/usage.rs deleted file mode 100644 index 331c94a8a90df2e38601603a746f97ebbf703461..0000000000000000000000000000000000000000 --- a/crates/collab/src/llm/db/tables/usage.rs +++ /dev/null @@ -1,52 +0,0 @@ -use crate::{ - db::UserId, - llm::db::{ModelId, UsageId, UsageMeasureId}, -}; -use sea_orm::entity::prelude::*; - -/// An LLM usage record. -#[derive(Clone, Debug, PartialEq, DeriveEntityModel)] -#[sea_orm(table_name = "usages")] -pub struct Model { - #[sea_orm(primary_key)] - pub id: UsageId, - /// The ID of the Zed user. - /// - /// Corresponds to the `users` table in the primary collab database. 
- pub user_id: UserId, - pub model_id: ModelId, - pub measure_id: UsageMeasureId, - pub timestamp: DateTime, - pub buckets: Vec, - pub is_staff: bool, -} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation { - #[sea_orm( - belongs_to = "super::model::Entity", - from = "Column::ModelId", - to = "super::model::Column::Id" - )] - Model, - #[sea_orm( - belongs_to = "super::usage_measure::Entity", - from = "Column::MeasureId", - to = "super::usage_measure::Column::Id" - )] - UsageMeasure, -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::Model.def() - } -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::UsageMeasure.def() - } -} - -impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/llm/db/tables/usage_measure.rs b/crates/collab/src/llm/db/tables/usage_measure.rs deleted file mode 100644 index 4f75577ed4684ff73b98389eaa08aefbabc08a16..0000000000000000000000000000000000000000 --- a/crates/collab/src/llm/db/tables/usage_measure.rs +++ /dev/null @@ -1,36 +0,0 @@ -use crate::llm::db::UsageMeasureId; -use sea_orm::entity::prelude::*; - -#[derive( - Copy, Clone, Debug, PartialEq, Eq, Hash, strum::EnumString, strum::Display, strum::EnumIter, -)] -#[strum(serialize_all = "snake_case")] -pub enum UsageMeasure { - RequestsPerMinute, - TokensPerMinute, - InputTokensPerMinute, - OutputTokensPerMinute, - TokensPerDay, -} - -#[derive(Clone, Debug, PartialEq, DeriveEntityModel)] -#[sea_orm(table_name = "usage_measures")] -pub struct Model { - #[sea_orm(primary_key)] - pub id: UsageMeasureId, - pub name: String, -} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation { - #[sea_orm(has_many = "super::usage::Entity")] - Usages, -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::Usages.def() - } -} - -impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/llm/db/tests.rs b/crates/collab/src/llm/db/tests.rs deleted file mode 100644 index 43a1b8b0d457817d1e94d72d0cad094011424c83..0000000000000000000000000000000000000000 --- a/crates/collab/src/llm/db/tests.rs +++ /dev/null @@ -1,107 +0,0 @@ -mod provider_tests; - -use gpui::BackgroundExecutor; -use parking_lot::Mutex; -use rand::prelude::*; -use sea_orm::ConnectionTrait; -use sqlx::migrate::MigrateDatabase; -use std::time::Duration; - -use crate::migrations::run_database_migrations; - -use super::*; - -pub struct TestLlmDb { - pub db: Option, - pub connection: Option, -} - -impl TestLlmDb { - pub fn postgres(background: BackgroundExecutor) -> Self { - static LOCK: Mutex<()> = Mutex::new(()); - - let _guard = LOCK.lock(); - let mut rng = StdRng::from_entropy(); - let url = format!( - "postgres://postgres@localhost/zed-llm-test-{}", - rng.r#gen::() - ); - let runtime = tokio::runtime::Builder::new_current_thread() - .enable_io() - .enable_time() - .build() - .unwrap(); - - let mut db = runtime.block_on(async { - sqlx::Postgres::create_database(&url) - .await - .expect("failed to create test db"); - let mut options = ConnectOptions::new(url); - options - .max_connections(5) - .idle_timeout(Duration::from_secs(0)); - let db = LlmDatabase::new(options, Executor::Deterministic(background)) - .await - .unwrap(); - let migrations_path = concat!(env!("CARGO_MANIFEST_DIR"), "/migrations_llm"); - run_database_migrations(db.options(), migrations_path) - .await - .unwrap(); - db - }); - - db.runtime = Some(runtime); - - Self { - db: Some(db), - connection: None, - } - } - - pub fn db(&mut self) -> &mut LlmDatabase { - 
self.db.as_mut().unwrap() - } -} - -#[macro_export] -macro_rules! test_llm_db { - ($test_name:ident, $postgres_test_name:ident) => { - #[gpui::test] - async fn $postgres_test_name(cx: &mut gpui::TestAppContext) { - if !cfg!(target_os = "macos") { - return; - } - - let mut test_db = $crate::llm::db::TestLlmDb::postgres(cx.executor().clone()); - $test_name(test_db.db()).await; - } - }; -} - -impl Drop for TestLlmDb { - fn drop(&mut self) { - let db = self.db.take().unwrap(); - if let sea_orm::DatabaseBackend::Postgres = db.pool.get_database_backend() { - db.runtime.as_ref().unwrap().block_on(async { - use util::ResultExt; - let query = " - SELECT pg_terminate_backend(pg_stat_activity.pid) - FROM pg_stat_activity - WHERE - pg_stat_activity.datname = current_database() AND - pid <> pg_backend_pid(); - "; - db.pool - .execute(sea_orm::Statement::from_string( - db.pool.get_database_backend(), - query, - )) - .await - .log_err(); - sqlx::Postgres::drop_database(db.options.get_url()) - .await - .log_err(); - }) - } - } -} diff --git a/crates/collab/src/llm/db/tests/provider_tests.rs b/crates/collab/src/llm/db/tests/provider_tests.rs deleted file mode 100644 index f4e1de40ec10705ed9b740619754fcf9ec5f3e1e..0000000000000000000000000000000000000000 --- a/crates/collab/src/llm/db/tests/provider_tests.rs +++ /dev/null @@ -1,31 +0,0 @@ -use cloud_llm_client::LanguageModelProvider; -use pretty_assertions::assert_eq; - -use crate::llm::db::LlmDatabase; -use crate::test_llm_db; - -test_llm_db!( - test_initialize_providers, - test_initialize_providers_postgres -); - -async fn test_initialize_providers(db: &mut LlmDatabase) { - let initial_providers = db.list_providers().await.unwrap(); - assert_eq!(initial_providers, vec![]); - - db.initialize_providers().await.unwrap(); - - // Do it twice, to make sure the operation is idempotent. 
- db.initialize_providers().await.unwrap(); - - let providers = db.list_providers().await.unwrap(); - - assert_eq!( - providers, - &[ - LanguageModelProvider::Anthropic, - LanguageModelProvider::Google, - LanguageModelProvider::OpenAi, - ] - ) -} diff --git a/crates/collab/src/llm/token.rs b/crates/collab/src/llm/token.rs deleted file mode 100644 index da01c7f3bed5cab1e7dbd6cfdef8cd4d7643044c..0000000000000000000000000000000000000000 --- a/crates/collab/src/llm/token.rs +++ /dev/null @@ -1,146 +0,0 @@ -use crate::db::billing_subscription::SubscriptionKind; -use crate::db::{billing_customer, billing_subscription, user}; -use crate::llm::{AGENT_EXTENDED_TRIAL_FEATURE_FLAG, BYPASS_ACCOUNT_AGE_CHECK_FEATURE_FLAG}; -use crate::{Config, db::billing_preference}; -use anyhow::{Context as _, Result}; -use chrono::{NaiveDateTime, Utc}; -use cloud_llm_client::Plan; -use jsonwebtoken::{DecodingKey, EncodingKey, Header, Validation}; -use serde::{Deserialize, Serialize}; -use std::time::Duration; -use thiserror::Error; -use uuid::Uuid; - -#[derive(Clone, Debug, Default, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct LlmTokenClaims { - pub iat: u64, - pub exp: u64, - pub jti: String, - pub user_id: u64, - pub system_id: Option, - pub metrics_id: Uuid, - pub github_user_login: String, - pub account_created_at: NaiveDateTime, - pub is_staff: bool, - pub has_llm_closed_beta_feature_flag: bool, - pub bypass_account_age_check: bool, - pub use_llm_request_queue: bool, - pub plan: Plan, - pub has_extended_trial: bool, - pub subscription_period: (NaiveDateTime, NaiveDateTime), - pub enable_model_request_overages: bool, - pub model_request_overages_spend_limit_in_cents: u32, - pub can_use_web_search_tool: bool, - #[serde(default)] - pub has_overdue_invoices: bool, -} - -const LLM_TOKEN_LIFETIME: Duration = Duration::from_secs(60 * 60); - -impl LlmTokenClaims { - pub fn create( - user: &user::Model, - is_staff: bool, - billing_customer: billing_customer::Model, - billing_preferences: Option, - feature_flags: &Vec, - subscription: billing_subscription::Model, - system_id: Option, - config: &Config, - ) -> Result { - let secret = config - .llm_api_secret - .as_ref() - .context("no LLM API secret")?; - - let plan = if is_staff { - Plan::ZedPro - } else { - subscription.kind.map_or(Plan::ZedFree, |kind| match kind { - SubscriptionKind::ZedFree => Plan::ZedFree, - SubscriptionKind::ZedPro => Plan::ZedPro, - SubscriptionKind::ZedProTrial => Plan::ZedProTrial, - }) - }; - let subscription_period = - billing_subscription::Model::current_period(Some(subscription), is_staff) - .map(|(start, end)| (start.naive_utc(), end.naive_utc())) - .context("A plan is required to use Zed's hosted models or edit predictions. 
Visit https://zed.dev/account to get started.")?; - - let now = Utc::now(); - let claims = Self { - iat: now.timestamp() as u64, - exp: (now + LLM_TOKEN_LIFETIME).timestamp() as u64, - jti: uuid::Uuid::new_v4().to_string(), - user_id: user.id.to_proto(), - system_id, - metrics_id: user.metrics_id, - github_user_login: user.github_login.clone(), - account_created_at: user.account_created_at(), - is_staff, - has_llm_closed_beta_feature_flag: feature_flags - .iter() - .any(|flag| flag == "llm-closed-beta"), - bypass_account_age_check: feature_flags - .iter() - .any(|flag| flag == BYPASS_ACCOUNT_AGE_CHECK_FEATURE_FLAG), - can_use_web_search_tool: true, - use_llm_request_queue: feature_flags.iter().any(|flag| flag == "llm-request-queue"), - plan, - has_extended_trial: feature_flags - .iter() - .any(|flag| flag == AGENT_EXTENDED_TRIAL_FEATURE_FLAG), - subscription_period, - enable_model_request_overages: billing_preferences - .as_ref() - .map_or(false, |preferences| { - preferences.model_request_overages_enabled - }), - model_request_overages_spend_limit_in_cents: billing_preferences - .as_ref() - .map_or(0, |preferences| { - preferences.model_request_overages_spend_limit_in_cents as u32 - }), - has_overdue_invoices: billing_customer.has_overdue_invoices, - }; - - Ok(jsonwebtoken::encode( - &Header::default(), - &claims, - &EncodingKey::from_secret(secret.as_ref()), - )?) - } - - pub fn validate(token: &str, config: &Config) -> Result { - let secret = config - .llm_api_secret - .as_ref() - .context("no LLM API secret")?; - - match jsonwebtoken::decode::( - token, - &DecodingKey::from_secret(secret.as_ref()), - &Validation::default(), - ) { - Ok(token) => Ok(token.claims), - Err(e) => { - if e.kind() == &jsonwebtoken::errors::ErrorKind::ExpiredSignature { - Err(ValidateLlmTokenError::Expired) - } else { - Err(ValidateLlmTokenError::JwtError(e)) - } - } - } - } -} - -#[derive(Error, Debug)] -pub enum ValidateLlmTokenError { - #[error("access token is expired")] - Expired, - #[error("access token validation error: {0}")] - JwtError(#[from] jsonwebtoken::errors::Error), - #[error("{0}")] - Other(#[from] anyhow::Error), -} diff --git a/crates/collab/src/main.rs b/crates/collab/src/main.rs index 20641cb2322a6aa10372064ca208eef091b2ae5a..cb6f6cad1dd483c463bcda5d8a4ff914f4bf10aa 100644 --- a/crates/collab/src/main.rs +++ b/crates/collab/src/main.rs @@ -62,13 +62,6 @@ async fn main() -> Result<()> { db.initialize_notification_kinds().await?; collab::seed::seed(&config, &db, false).await?; - - if let Some(llm_database_url) = config.llm_database_url.clone() { - let db_options = db::ConnectOptions::new(llm_database_url); - let mut db = LlmDatabase::new(db_options.clone(), Executor::Production).await?; - db.initialize().await?; - collab::llm::db::seed_database(&config, &mut db, true).await?; - } } Some("serve") => { let mode = match args.next().as_deref() { @@ -102,13 +95,6 @@ async fn main() -> Result<()> { let state = AppState::new(config, Executor::Production).await?; - if let Some(stripe_billing) = state.stripe_billing.clone() { - let executor = state.executor.clone(); - executor.spawn_detached(async move { - stripe_billing.initialize().await.trace_err(); - }); - } - if mode.is_collab() { state.db.purge_old_embeddings().await.trace_err(); @@ -270,9 +256,6 @@ async fn setup_llm_database(config: &Config) -> Result<()> { .llm_database_migrations_path .as_deref() .unwrap_or_else(|| { - #[cfg(feature = "sqlite")] - let default_migrations = concat!(env!("CARGO_MANIFEST_DIR"), "/migrations_llm.sqlite"); - 
#[cfg(not(feature = "sqlite"))] let default_migrations = concat!(env!("CARGO_MANIFEST_DIR"), "/migrations_llm"); Path::new(default_migrations) diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index ec1105b138f728de36d0798ddb6ddf22aa4bc8c8..73f327166a3f1fb40a1f232ea2fabcdedd3fb129 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -1,14 +1,6 @@ mod connection_pool; -use crate::api::billing::find_or_create_billing_customer; use crate::api::{CloudflareIpCountryHeader, SystemIdHeader}; -use crate::db::billing_subscription::SubscriptionKind; -use crate::llm::db::LlmDatabase; -use crate::llm::{ - AGENT_EXTENDED_TRIAL_FEATURE_FLAG, BYPASS_ACCOUNT_AGE_CHECK_FEATURE_FLAG, LlmTokenClaims, - MIN_ACCOUNT_AGE_FOR_LLM_USE, -}; -use crate::stripe_client::StripeCustomerId; use crate::{ AppState, Error, Result, auth, db::{ @@ -37,7 +29,6 @@ use axum::{ response::IntoResponse, routing::get, }; -use chrono::Utc; use collections::{HashMap, HashSet}; pub use connection_pool::{ConnectionPool, ZedVersion}; use core::fmt::{self, Debug, Formatter}; @@ -148,13 +139,6 @@ pub enum Principal { } impl Principal { - fn user(&self) -> &User { - match self { - Principal::User(user) => user, - Principal::Impersonated { user, .. } => user, - } - } - fn update_span(&self, span: &tracing::Span) { match &self { Principal::User(user) => { @@ -218,6 +202,7 @@ struct Session { /// The GeoIP country code for the user. #[allow(unused)] geoip_country_code: Option, + #[allow(unused)] system_id: Option, _executor: Executor, } @@ -325,7 +310,7 @@ impl Server { let mut server = Self { id: parking_lot::Mutex::new(id), peer: Peer::new(id.0 as u32), - app_state: app_state.clone(), + app_state, connection_pool: Default::default(), handlers: Default::default(), teardown: watch::channel(false).0, @@ -415,6 +400,8 @@ impl Server { .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) .add_request_handler(multi_lsp_query) + .add_request_handler(lsp_query) + .add_message_handler(broadcast_project_message_from_host::) .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) @@ -463,9 +450,6 @@ impl Server { .add_request_handler(follow) .add_message_handler(unfollow) .add_message_handler(update_followers) - .add_request_handler(get_private_user_info) - .add_request_handler(get_llm_api_token) - .add_request_handler(accept_terms_of_service) .add_message_handler(acknowledge_channel_message) .add_message_handler(acknowledge_buffer_version) .add_request_handler(get_supermaven_api_key) @@ -634,10 +618,10 @@ impl Server { } } - if let Some(live_kit) = livekit_client.as_ref() { - if delete_livekit_room { - live_kit.delete_room(livekit_room).await.trace_err(); - } + if let Some(live_kit) = livekit_client.as_ref() + && delete_livekit_room + { + live_kit.delete_room(livekit_room).await.trace_err(); } } } @@ -928,7 +912,10 @@ impl Server { user_id=field::Empty, login=field::Empty, impersonator=field::Empty, + // todo(lsp) remove after Zed Stable hits v0.204.x multi_lsp_query_request=field::Empty, + lsp_query_request=field::Empty, + release_channel=field::Empty, { TOTAL_DURATION_MS }=field::Empty, { PROCESSING_DURATION_MS }=field::Empty, { QUEUE_DURATION_MS }=field::Empty, @@ -999,8 +986,6 @@ impl Server { .await?; } - update_user_plan(session).await?; - let contacts = self.app_state.db.get_contacts(user.id).await?; { @@ -1034,99 +1019,52 @@ impl Server 
{ inviter_id: UserId, invitee_id: UserId, ) -> Result<()> { - if let Some(user) = self.app_state.db.get_user_by_id(inviter_id).await? { - if let Some(code) = &user.invite_code { - let pool = self.connection_pool.lock(); - let invitee_contact = contact_for_user(invitee_id, false, &pool); - for connection_id in pool.user_connection_ids(inviter_id) { - self.peer.send( - connection_id, - proto::UpdateContacts { - contacts: vec![invitee_contact.clone()], - ..Default::default() - }, - )?; - self.peer.send( - connection_id, - proto::UpdateInviteInfo { - url: format!("{}{}", self.app_state.config.invite_link_prefix, &code), - count: user.invite_count as u32, - }, - )?; - } + if let Some(user) = self.app_state.db.get_user_by_id(inviter_id).await? + && let Some(code) = &user.invite_code + { + let pool = self.connection_pool.lock(); + let invitee_contact = contact_for_user(invitee_id, false, &pool); + for connection_id in pool.user_connection_ids(inviter_id) { + self.peer.send( + connection_id, + proto::UpdateContacts { + contacts: vec![invitee_contact.clone()], + ..Default::default() + }, + )?; + self.peer.send( + connection_id, + proto::UpdateInviteInfo { + url: format!("{}{}", self.app_state.config.invite_link_prefix, &code), + count: user.invite_count as u32, + }, + )?; } } Ok(()) } pub async fn invite_count_updated(self: &Arc, user_id: UserId) -> Result<()> { - if let Some(user) = self.app_state.db.get_user_by_id(user_id).await? { - if let Some(invite_code) = &user.invite_code { - let pool = self.connection_pool.lock(); - for connection_id in pool.user_connection_ids(user_id) { - self.peer.send( - connection_id, - proto::UpdateInviteInfo { - url: format!( - "{}{}", - self.app_state.config.invite_link_prefix, invite_code - ), - count: user.invite_count as u32, - }, - )?; - } + if let Some(user) = self.app_state.db.get_user_by_id(user_id).await? + && let Some(invite_code) = &user.invite_code + { + let pool = self.connection_pool.lock(); + for connection_id in pool.user_connection_ids(user_id) { + self.peer.send( + connection_id, + proto::UpdateInviteInfo { + url: format!( + "{}{}", + self.app_state.config.invite_link_prefix, invite_code + ), + count: user.invite_count as u32, + }, + )?; } } Ok(()) } - pub async fn update_plan_for_user( - self: &Arc, - user_id: UserId, - update_user_plan: proto::UpdateUserPlan, - ) -> Result<()> { - let pool = self.connection_pool.lock(); - for connection_id in pool.user_connection_ids(user_id) { - self.peer - .send(connection_id, update_user_plan.clone()) - .trace_err(); - } - - Ok(()) - } - - /// This is the legacy way of updating the user's plan, where we fetch the data to construct the `UpdateUserPlan` - /// message on the Collab server. - /// - /// The new way is to receive the data from Cloud via the `POST /users/:id/update_plan` endpoint. - pub async fn update_plan_for_user_legacy(self: &Arc, user_id: UserId) -> Result<()> { - let user = self - .app_state - .db - .get_user_by_id(user_id) - .await? 
- .context("user not found")?; - - let update_user_plan = make_update_user_plan_message( - &user, - user.admin, - &self.app_state.db, - self.app_state.llm_db.clone(), - ) - .await?; - - self.update_plan_for_user(user_id, update_user_plan).await - } - - pub async fn refresh_llm_tokens_for_user(self: &Arc, user_id: UserId) { - let pool = self.connection_pool.lock(); - for connection_id in pool.user_connection_ids(user_id) { - self.peer - .send(connection_id, proto::RefreshLlmToken {}) - .trace_err(); - } - } - pub async fn snapshot(self: &Arc) -> ServerSnapshot<'_> { ServerSnapshot { connection_pool: ConnectionPoolGuard { @@ -1167,10 +1105,10 @@ fn broadcast( F: FnMut(ConnectionId) -> anyhow::Result<()>, { for receiver_id in receiver_ids { - if Some(receiver_id) != sender_id { - if let Err(error) = f(receiver_id) { - tracing::error!("failed to send to {:?} {}", receiver_id, error); - } + if Some(receiver_id) != sender_id + && let Err(error) = f(receiver_id) + { + tracing::error!("failed to send to {:?} {}", receiver_id, error); } } } @@ -1452,9 +1390,7 @@ async fn create_room( let live_kit = live_kit?; let user_id = session.user_id().to_string(); - let token = live_kit - .room_token(&livekit_room, &user_id.to_string()) - .trace_err()?; + let token = live_kit.room_token(&livekit_room, &user_id).trace_err()?; Some(proto::LiveKitConnectionInfo { server_url: live_kit.url().into(), @@ -2081,9 +2017,9 @@ async fn join_project( .unzip(); response.send(proto::JoinProjectResponse { project_id: project.id.0 as u64, - worktrees: worktrees.clone(), + worktrees, replica_id: replica_id.0 as u32, - collaborators: collaborators.clone(), + collaborators, language_servers, language_server_capabilities, role: project.role.into(), @@ -2360,11 +2296,10 @@ async fn update_language_server( let db = session.db().await; if let Some(proto::update_language_server::Variant::MetadataUpdated(update)) = &request.variant + && let Some(capabilities) = update.capabilities.clone() { - if let Some(capabilities) = update.capabilities.clone() { - db.update_server_capabilities(project_id, request.language_server_id, capabilities) - .await?; - } + db.update_server_capabilities(project_id, request.language_server_id, capabilities) + .await?; } let project_connection_ids = db @@ -2425,6 +2360,7 @@ where Ok(()) } +// todo(lsp) remove after Zed Stable hits v0.204.x async fn multi_lsp_query( request: MultiLspQuery, response: Response, @@ -2435,6 +2371,21 @@ async fn multi_lsp_query( forward_mutating_project_request(request, response, session).await } +async fn lsp_query( + request: proto::LspQuery, + response: Response, + session: MessageContext, +) -> Result<()> { + let (name, should_write) = request.query_name_and_write_permissions(); + tracing::Span::current().record("lsp_query_request", name); + tracing::info!("lsp_query message received"); + if should_write { + forward_mutating_project_request(request, response, session).await + } else { + forward_read_only_project_request(request, response, session).await + } +} + /// Notify other participants that a new buffer has been created async fn create_buffer_for_peer( request: proto::CreateBufferForPeer, @@ -2881,214 +2832,6 @@ fn should_auto_subscribe_to_channels(version: ZedVersion) -> bool { version.0.minor() < 139 } -async fn current_plan(db: &Arc, user_id: UserId, is_staff: bool) -> Result { - if is_staff { - return Ok(proto::Plan::ZedPro); - } - - let subscription = db.get_active_billing_subscription(user_id).await?; - let subscription_kind = subscription.and_then(|subscription| 
subscription.kind); - - let plan = if let Some(subscription_kind) = subscription_kind { - match subscription_kind { - SubscriptionKind::ZedPro => proto::Plan::ZedPro, - SubscriptionKind::ZedProTrial => proto::Plan::ZedProTrial, - SubscriptionKind::ZedFree => proto::Plan::Free, - } - } else { - proto::Plan::Free - }; - - Ok(plan) -} - -async fn make_update_user_plan_message( - user: &User, - is_staff: bool, - db: &Arc, - llm_db: Option>, -) -> Result { - let feature_flags = db.get_user_flags(user.id).await?; - let plan = current_plan(db, user.id, is_staff).await?; - let billing_customer = db.get_billing_customer_by_user_id(user.id).await?; - let billing_preferences = db.get_billing_preferences(user.id).await?; - - let (subscription_period, usage) = if let Some(llm_db) = llm_db { - let subscription = db.get_active_billing_subscription(user.id).await?; - - let subscription_period = - crate::db::billing_subscription::Model::current_period(subscription, is_staff); - - let usage = if let Some((period_start_at, period_end_at)) = subscription_period { - llm_db - .get_subscription_usage_for_period(user.id, period_start_at, period_end_at) - .await? - } else { - None - }; - - (subscription_period, usage) - } else { - (None, None) - }; - - let bypass_account_age_check = feature_flags - .iter() - .any(|flag| flag == BYPASS_ACCOUNT_AGE_CHECK_FEATURE_FLAG); - let account_too_young = !matches!(plan, proto::Plan::ZedPro) - && !bypass_account_age_check - && user.account_age() < MIN_ACCOUNT_AGE_FOR_LLM_USE; - - Ok(proto::UpdateUserPlan { - plan: plan.into(), - trial_started_at: billing_customer - .as_ref() - .and_then(|billing_customer| billing_customer.trial_started_at) - .map(|trial_started_at| trial_started_at.and_utc().timestamp() as u64), - is_usage_based_billing_enabled: if is_staff { - Some(true) - } else { - billing_preferences.map(|preferences| preferences.model_request_overages_enabled) - }, - subscription_period: subscription_period.map(|(started_at, ended_at)| { - proto::SubscriptionPeriod { - started_at: started_at.timestamp() as u64, - ended_at: ended_at.timestamp() as u64, - } - }), - account_too_young: Some(account_too_young), - has_overdue_invoices: billing_customer - .map(|billing_customer| billing_customer.has_overdue_invoices), - usage: Some( - usage - .map(|usage| subscription_usage_to_proto(plan, usage, &feature_flags)) - .unwrap_or_else(|| make_default_subscription_usage(plan, &feature_flags)), - ), - }) -} - -fn model_requests_limit( - plan: cloud_llm_client::Plan, - feature_flags: &Vec, -) -> cloud_llm_client::UsageLimit { - match plan.model_requests_limit() { - cloud_llm_client::UsageLimit::Limited(limit) => { - let limit = if plan == cloud_llm_client::Plan::ZedProTrial - && feature_flags - .iter() - .any(|flag| flag == AGENT_EXTENDED_TRIAL_FEATURE_FLAG) - { - 1_000 - } else { - limit - }; - - cloud_llm_client::UsageLimit::Limited(limit) - } - cloud_llm_client::UsageLimit::Unlimited => cloud_llm_client::UsageLimit::Unlimited, - } -} - -fn subscription_usage_to_proto( - plan: proto::Plan, - usage: crate::llm::db::subscription_usage::Model, - feature_flags: &Vec, -) -> proto::SubscriptionUsage { - let plan = match plan { - proto::Plan::Free => cloud_llm_client::Plan::ZedFree, - proto::Plan::ZedPro => cloud_llm_client::Plan::ZedPro, - proto::Plan::ZedProTrial => cloud_llm_client::Plan::ZedProTrial, - }; - - proto::SubscriptionUsage { - model_requests_usage_amount: usage.model_requests as u32, - model_requests_usage_limit: Some(proto::UsageLimit { - variant: Some(match 
model_requests_limit(plan, feature_flags) { - cloud_llm_client::UsageLimit::Limited(limit) => { - proto::usage_limit::Variant::Limited(proto::usage_limit::Limited { - limit: limit as u32, - }) - } - cloud_llm_client::UsageLimit::Unlimited => { - proto::usage_limit::Variant::Unlimited(proto::usage_limit::Unlimited {}) - } - }), - }), - edit_predictions_usage_amount: usage.edit_predictions as u32, - edit_predictions_usage_limit: Some(proto::UsageLimit { - variant: Some(match plan.edit_predictions_limit() { - cloud_llm_client::UsageLimit::Limited(limit) => { - proto::usage_limit::Variant::Limited(proto::usage_limit::Limited { - limit: limit as u32, - }) - } - cloud_llm_client::UsageLimit::Unlimited => { - proto::usage_limit::Variant::Unlimited(proto::usage_limit::Unlimited {}) - } - }), - }), - } -} - -fn make_default_subscription_usage( - plan: proto::Plan, - feature_flags: &Vec, -) -> proto::SubscriptionUsage { - let plan = match plan { - proto::Plan::Free => cloud_llm_client::Plan::ZedFree, - proto::Plan::ZedPro => cloud_llm_client::Plan::ZedPro, - proto::Plan::ZedProTrial => cloud_llm_client::Plan::ZedProTrial, - }; - - proto::SubscriptionUsage { - model_requests_usage_amount: 0, - model_requests_usage_limit: Some(proto::UsageLimit { - variant: Some(match model_requests_limit(plan, feature_flags) { - cloud_llm_client::UsageLimit::Limited(limit) => { - proto::usage_limit::Variant::Limited(proto::usage_limit::Limited { - limit: limit as u32, - }) - } - cloud_llm_client::UsageLimit::Unlimited => { - proto::usage_limit::Variant::Unlimited(proto::usage_limit::Unlimited {}) - } - }), - }), - edit_predictions_usage_amount: 0, - edit_predictions_usage_limit: Some(proto::UsageLimit { - variant: Some(match plan.edit_predictions_limit() { - cloud_llm_client::UsageLimit::Limited(limit) => { - proto::usage_limit::Variant::Limited(proto::usage_limit::Limited { - limit: limit as u32, - }) - } - cloud_llm_client::UsageLimit::Unlimited => { - proto::usage_limit::Variant::Unlimited(proto::usage_limit::Unlimited {}) - } - }), - }), - } -} - -async fn update_user_plan(session: &Session) -> Result<()> { - let db = session.db().await; - - let update_user_plan = make_update_user_plan_message( - session.principal.user(), - session.is_staff(), - &db.0, - session.app_state.llm_db.clone(), - ) - .await?; - - session - .peer - .send(session.connection_id, update_user_plan) - .trace_err(); - - Ok(()) -} - async fn subscribe_to_channels( _: proto::SubscribeToChannels, session: MessageContext, @@ -4257,139 +4000,6 @@ async fn mark_notification_as_read( Ok(()) } -/// Get the current users information -async fn get_private_user_info( - _request: proto::GetPrivateUserInfo, - response: Response, - session: MessageContext, -) -> Result<()> { - let db = session.db().await; - - let metrics_id = db.get_user_metrics_id(session.user_id()).await?; - let user = db - .get_user_by_id(session.user_id()) - .await? 
- .context("user not found")?; - let flags = db.get_user_flags(session.user_id()).await?; - - response.send(proto::GetPrivateUserInfoResponse { - metrics_id, - staff: user.admin, - flags, - accepted_tos_at: user.accepted_tos_at.map(|t| t.and_utc().timestamp() as u64), - })?; - Ok(()) -} - -/// Accept the terms of service (tos) on behalf of the current user -async fn accept_terms_of_service( - _request: proto::AcceptTermsOfService, - response: Response, - session: MessageContext, -) -> Result<()> { - let db = session.db().await; - - let accepted_tos_at = Utc::now(); - db.set_user_accepted_tos_at(session.user_id(), Some(accepted_tos_at.naive_utc())) - .await?; - - response.send(proto::AcceptTermsOfServiceResponse { - accepted_tos_at: accepted_tos_at.timestamp() as u64, - })?; - - // When the user accepts the terms of service, we want to refresh their LLM - // token to grant access. - session - .peer - .send(session.connection_id, proto::RefreshLlmToken {})?; - - Ok(()) -} - -async fn get_llm_api_token( - _request: proto::GetLlmToken, - response: Response, - session: MessageContext, -) -> Result<()> { - let db = session.db().await; - - let flags = db.get_user_flags(session.user_id()).await?; - - let user_id = session.user_id(); - let user = db - .get_user_by_id(user_id) - .await? - .with_context(|| format!("user {user_id} not found"))?; - - if user.accepted_tos_at.is_none() { - Err(anyhow!("terms of service not accepted"))? - } - - let stripe_client = session - .app_state - .stripe_client - .as_ref() - .context("failed to retrieve Stripe client")?; - - let stripe_billing = session - .app_state - .stripe_billing - .as_ref() - .context("failed to retrieve Stripe billing object")?; - - let billing_customer = if let Some(billing_customer) = - db.get_billing_customer_by_user_id(user.id).await? - { - billing_customer - } else { - let customer_id = stripe_billing - .find_or_create_customer_by_email(user.email_address.as_deref()) - .await?; - - find_or_create_billing_customer(&session.app_state, stripe_client.as_ref(), &customer_id) - .await? - .context("billing customer not found")? - }; - - let billing_subscription = - if let Some(billing_subscription) = db.get_active_billing_subscription(user.id).await? { - billing_subscription - } else { - let stripe_customer_id = - StripeCustomerId(billing_customer.stripe_customer_id.clone().into()); - - let stripe_subscription = stripe_billing - .subscribe_to_zed_free(stripe_customer_id) - .await?; - - db.create_billing_subscription(&db::CreateBillingSubscriptionParams { - billing_customer_id: billing_customer.id, - kind: Some(SubscriptionKind::ZedFree), - stripe_subscription_id: stripe_subscription.id.to_string(), - stripe_subscription_status: stripe_subscription.status.into(), - stripe_cancellation_reason: None, - stripe_current_period_start: Some(stripe_subscription.current_period_start), - stripe_current_period_end: Some(stripe_subscription.current_period_end), - }) - .await? 
- }; - - let billing_preferences = db.get_billing_preferences(user.id).await?; - - let token = LlmTokenClaims::create( - &user, - session.is_staff(), - billing_customer, - billing_preferences, - &flags, - billing_subscription, - session.system_id.clone(), - &session.app_state.config, - )?; - response.send(proto::GetLlmTokenResponse { token })?; - Ok(()) -} - fn to_axum_message(message: TungsteniteMessage) -> anyhow::Result { let message = match message { TungsteniteMessage::Text(payload) => AxumMessage::Text(payload.as_str().to_string()), diff --git a/crates/collab/src/rpc/connection_pool.rs b/crates/collab/src/rpc/connection_pool.rs index 35290fa697680140e52a147cc25cd87b6afee31e..729e7c8533460c0789d74040e883d48c8b94af92 100644 --- a/crates/collab/src/rpc/connection_pool.rs +++ b/crates/collab/src/rpc/connection_pool.rs @@ -30,7 +30,19 @@ impl fmt::Display for ZedVersion { impl ZedVersion { pub fn can_collaborate(&self) -> bool { - self.0 >= SemanticVersion::new(0, 157, 0) + // v0.198.4 is the first version where we no longer connect to Collab automatically. + // We reject any clients older than that to prevent them from connecting to Collab just for authentication. + if self.0 < SemanticVersion::new(0, 198, 4) { + return false; + } + + // Since we hotfixed the changes to no longer connect to Collab automatically to Preview, we also need to reject + // versions in the range [v0.199.0, v0.199.1]. + if self.0 >= SemanticVersion::new(0, 199, 0) && self.0 < SemanticVersion::new(0, 199, 2) { + return false; + } + + true } } diff --git a/crates/collab/src/stripe_billing.rs b/crates/collab/src/stripe_billing.rs deleted file mode 100644 index ef5bef3e7e5d6c687e4b963f820d5d484e6c4537..0000000000000000000000000000000000000000 --- a/crates/collab/src/stripe_billing.rs +++ /dev/null @@ -1,156 +0,0 @@ -use std::sync::Arc; - -use anyhow::anyhow; -use collections::HashMap; -use stripe::SubscriptionStatus; -use tokio::sync::RwLock; - -use crate::Result; -use crate::stripe_client::{ - RealStripeClient, StripeAutomaticTax, StripeClient, StripeCreateSubscriptionItems, - StripeCreateSubscriptionParams, StripeCustomerId, StripePrice, StripePriceId, - StripeSubscription, -}; - -pub struct StripeBilling { - state: RwLock, - client: Arc, -} - -#[derive(Default)] -struct StripeBillingState { - prices_by_lookup_key: HashMap, -} - -impl StripeBilling { - pub fn new(client: Arc) -> Self { - Self { - client: Arc::new(RealStripeClient::new(client.clone())), - state: RwLock::default(), - } - } - - #[cfg(test)] - pub fn test(client: Arc) -> Self { - Self { - client, - state: RwLock::default(), - } - } - - pub fn client(&self) -> &Arc { - &self.client - } - - pub async fn initialize(&self) -> Result<()> { - log::info!("StripeBilling: initializing"); - - let mut state = self.state.write().await; - - let prices = self.client.list_prices().await?; - - for price in prices { - if let Some(lookup_key) = price.lookup_key.clone() { - state.prices_by_lookup_key.insert(lookup_key, price); - } - } - - log::info!("StripeBilling: initialized"); - - Ok(()) - } - - pub async fn zed_pro_price_id(&self) -> Result { - self.find_price_id_by_lookup_key("zed-pro").await - } - - pub async fn zed_free_price_id(&self) -> Result { - self.find_price_id_by_lookup_key("zed-free").await - } - - pub async fn find_price_id_by_lookup_key(&self, lookup_key: &str) -> Result { - self.state - .read() - .await - .prices_by_lookup_key - .get(lookup_key) - .map(|price| price.id.clone()) - .ok_or_else(|| crate::Error::Internal(anyhow!("no price ID found for 
{lookup_key:?}"))) - } - - pub async fn find_price_by_lookup_key(&self, lookup_key: &str) -> Result { - self.state - .read() - .await - .prices_by_lookup_key - .get(lookup_key) - .cloned() - .ok_or_else(|| crate::Error::Internal(anyhow!("no price found for {lookup_key:?}"))) - } - - /// Returns the Stripe customer associated with the provided email address, or creates a new customer, if one does - /// not already exist. - /// - /// Always returns a new Stripe customer if the email address is `None`. - pub async fn find_or_create_customer_by_email( - &self, - email_address: Option<&str>, - ) -> Result { - let existing_customer = if let Some(email) = email_address { - let customers = self.client.list_customers_by_email(email).await?; - - customers.first().cloned() - } else { - None - }; - - let customer_id = if let Some(existing_customer) = existing_customer { - existing_customer.id - } else { - let customer = self - .client - .create_customer(crate::stripe_client::CreateCustomerParams { - email: email_address, - }) - .await?; - - customer.id - }; - - Ok(customer_id) - } - - pub async fn subscribe_to_zed_free( - &self, - customer_id: StripeCustomerId, - ) -> Result { - let zed_free_price_id = self.zed_free_price_id().await?; - - let existing_subscriptions = self - .client - .list_subscriptions_for_customer(&customer_id) - .await?; - - let existing_active_subscription = - existing_subscriptions.into_iter().find(|subscription| { - subscription.status == SubscriptionStatus::Active - || subscription.status == SubscriptionStatus::Trialing - }); - if let Some(subscription) = existing_active_subscription { - return Ok(subscription); - } - - let params = StripeCreateSubscriptionParams { - customer: customer_id, - items: vec![StripeCreateSubscriptionItems { - price: Some(zed_free_price_id), - quantity: Some(1), - }], - automatic_tax: Some(StripeAutomaticTax { enabled: true }), - }; - - let subscription = self.client.create_subscription(params).await?; - - Ok(subscription) - } -} diff --git a/crates/collab/src/stripe_client.rs b/crates/collab/src/stripe_client.rs deleted file mode 100644 index 6e75a4d874bf41e7cb4418d4b56cfeb6040e5ff8..0000000000000000000000000000000000000000 --- a/crates/collab/src/stripe_client.rs +++ /dev/null @@ -1,285 +0,0 @@ -#[cfg(test)] -mod fake_stripe_client; -mod real_stripe_client; - -use std::collections::HashMap; -use std::sync::Arc; - -use anyhow::Result; -use async_trait::async_trait; - -#[cfg(test)] -pub use fake_stripe_client::*; -pub use real_stripe_client::*; -use serde::{Deserialize, Serialize}; - -#[derive(Debug, PartialEq, Eq, Hash, Clone, derive_more::Display, Serialize)] -pub struct StripeCustomerId(pub Arc); - -#[derive(Debug, Clone)] -pub struct StripeCustomer { - pub id: StripeCustomerId, - pub email: Option, -} - -#[derive(Debug)] -pub struct CreateCustomerParams<'a> { - pub email: Option<&'a str>, -} - -#[derive(Debug)] -pub struct UpdateCustomerParams<'a> { - pub email: Option<&'a str>, -} - -#[derive(Debug, PartialEq, Eq, Hash, Clone, derive_more::Display)] -pub struct StripeSubscriptionId(pub Arc); - -#[derive(Debug, PartialEq, Clone)] -pub struct StripeSubscription { - pub id: StripeSubscriptionId, - pub customer: StripeCustomerId, - // TODO: Create our own version of this enum. 
- pub status: stripe::SubscriptionStatus, - pub current_period_end: i64, - pub current_period_start: i64, - pub items: Vec, - pub cancel_at: Option, - pub cancellation_details: Option, -} - -#[derive(Debug, PartialEq, Eq, Hash, Clone, derive_more::Display)] -pub struct StripeSubscriptionItemId(pub Arc); - -#[derive(Debug, PartialEq, Clone)] -pub struct StripeSubscriptionItem { - pub id: StripeSubscriptionItemId, - pub price: Option, -} - -#[derive(Debug, Clone, PartialEq)] -pub struct StripeCancellationDetails { - pub reason: Option, -} - -#[derive(Debug, PartialEq, Eq, Clone, Copy)] -pub enum StripeCancellationDetailsReason { - CancellationRequested, - PaymentDisputed, - PaymentFailed, -} - -#[derive(Debug)] -pub struct StripeCreateSubscriptionParams { - pub customer: StripeCustomerId, - pub items: Vec, - pub automatic_tax: Option, -} - -#[derive(Debug)] -pub struct StripeCreateSubscriptionItems { - pub price: Option, - pub quantity: Option, -} - -#[derive(Debug, Clone)] -pub struct UpdateSubscriptionParams { - pub items: Option>, - pub trial_settings: Option, -} - -#[derive(Debug, PartialEq, Clone)] -pub struct UpdateSubscriptionItems { - pub price: Option, -} - -#[derive(Debug, PartialEq, Clone)] -pub struct StripeSubscriptionTrialSettings { - pub end_behavior: StripeSubscriptionTrialSettingsEndBehavior, -} - -#[derive(Debug, PartialEq, Clone)] -pub struct StripeSubscriptionTrialSettingsEndBehavior { - pub missing_payment_method: StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, -} - -#[derive(Debug, PartialEq, Eq, Clone, Copy)] -pub enum StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod { - Cancel, - CreateInvoice, - Pause, -} - -#[derive(Debug, PartialEq, Eq, Hash, Clone, derive_more::Display)] -pub struct StripePriceId(pub Arc); - -#[derive(Debug, PartialEq, Clone)] -pub struct StripePrice { - pub id: StripePriceId, - pub unit_amount: Option, - pub lookup_key: Option, - pub recurring: Option, -} - -#[derive(Debug, PartialEq, Clone)] -pub struct StripePriceRecurring { - pub meter: Option, -} - -#[derive(Debug, PartialEq, Eq, Hash, Clone, derive_more::Display, Deserialize)] -pub struct StripeMeterId(pub Arc); - -#[derive(Debug, Clone, Deserialize)] -pub struct StripeMeter { - pub id: StripeMeterId, - pub event_name: String, -} - -#[derive(Debug, Serialize)] -pub struct StripeCreateMeterEventParams<'a> { - pub identifier: &'a str, - pub event_name: &'a str, - pub payload: StripeCreateMeterEventPayload<'a>, - pub timestamp: Option, -} - -#[derive(Debug, Serialize)] -pub struct StripeCreateMeterEventPayload<'a> { - pub value: u64, - pub stripe_customer_id: &'a StripeCustomerId, -} - -#[derive(Debug, PartialEq, Eq, Clone, Copy)] -pub enum StripeBillingAddressCollection { - Auto, - Required, -} - -#[derive(Debug, PartialEq, Clone)] -pub struct StripeCustomerUpdate { - pub address: Option, - pub name: Option, - pub shipping: Option, -} - -#[derive(Debug, PartialEq, Eq, Clone, Copy)] -pub enum StripeCustomerUpdateAddress { - Auto, - Never, -} - -#[derive(Debug, PartialEq, Eq, Clone, Copy)] -pub enum StripeCustomerUpdateName { - Auto, - Never, -} - -#[derive(Debug, PartialEq, Eq, Clone, Copy)] -pub enum StripeCustomerUpdateShipping { - Auto, - Never, -} - -#[derive(Debug, Default)] -pub struct StripeCreateCheckoutSessionParams<'a> { - pub customer: Option<&'a StripeCustomerId>, - pub client_reference_id: Option<&'a str>, - pub mode: Option, - pub line_items: Option>, - pub payment_method_collection: Option, - pub subscription_data: Option, - pub success_url: 
Option<&'a str>, - pub billing_address_collection: Option, - pub customer_update: Option, - pub tax_id_collection: Option, -} - -#[derive(Debug, PartialEq, Eq, Clone, Copy)] -pub enum StripeCheckoutSessionMode { - Payment, - Setup, - Subscription, -} - -#[derive(Debug, PartialEq, Clone)] -pub struct StripeCreateCheckoutSessionLineItems { - pub price: Option, - pub quantity: Option, -} - -#[derive(Debug, PartialEq, Eq, Clone, Copy)] -pub enum StripeCheckoutSessionPaymentMethodCollection { - Always, - IfRequired, -} - -#[derive(Debug, PartialEq, Clone)] -pub struct StripeCreateCheckoutSessionSubscriptionData { - pub metadata: Option>, - pub trial_period_days: Option, - pub trial_settings: Option, -} - -#[derive(Debug, PartialEq, Clone)] -pub struct StripeTaxIdCollection { - pub enabled: bool, -} - -#[derive(Debug, Clone)] -pub struct StripeAutomaticTax { - pub enabled: bool, -} - -#[derive(Debug)] -pub struct StripeCheckoutSession { - pub url: Option, -} - -#[async_trait] -pub trait StripeClient: Send + Sync { - async fn list_customers_by_email(&self, email: &str) -> Result>; - - async fn get_customer(&self, customer_id: &StripeCustomerId) -> Result; - - async fn create_customer(&self, params: CreateCustomerParams<'_>) -> Result; - - async fn update_customer( - &self, - customer_id: &StripeCustomerId, - params: UpdateCustomerParams<'_>, - ) -> Result; - - async fn list_subscriptions_for_customer( - &self, - customer_id: &StripeCustomerId, - ) -> Result>; - - async fn get_subscription( - &self, - subscription_id: &StripeSubscriptionId, - ) -> Result; - - async fn create_subscription( - &self, - params: StripeCreateSubscriptionParams, - ) -> Result; - - async fn update_subscription( - &self, - subscription_id: &StripeSubscriptionId, - params: UpdateSubscriptionParams, - ) -> Result<()>; - - async fn cancel_subscription(&self, subscription_id: &StripeSubscriptionId) -> Result<()>; - - async fn list_prices(&self) -> Result>; - - async fn list_meters(&self) -> Result>; - - async fn create_meter_event(&self, params: StripeCreateMeterEventParams<'_>) -> Result<()>; - - async fn create_checkout_session( - &self, - params: StripeCreateCheckoutSessionParams<'_>, - ) -> Result; -} diff --git a/crates/collab/src/stripe_client/fake_stripe_client.rs b/crates/collab/src/stripe_client/fake_stripe_client.rs deleted file mode 100644 index 9bb08443ec6a5fd04ad11a8e24b1a71b03e4867b..0000000000000000000000000000000000000000 --- a/crates/collab/src/stripe_client/fake_stripe_client.rs +++ /dev/null @@ -1,247 +0,0 @@ -use std::sync::Arc; - -use anyhow::{Result, anyhow}; -use async_trait::async_trait; -use chrono::{Duration, Utc}; -use collections::HashMap; -use parking_lot::Mutex; -use uuid::Uuid; - -use crate::stripe_client::{ - CreateCustomerParams, StripeBillingAddressCollection, StripeCheckoutSession, - StripeCheckoutSessionMode, StripeCheckoutSessionPaymentMethodCollection, StripeClient, - StripeCreateCheckoutSessionLineItems, StripeCreateCheckoutSessionParams, - StripeCreateCheckoutSessionSubscriptionData, StripeCreateMeterEventParams, - StripeCreateSubscriptionParams, StripeCustomer, StripeCustomerId, StripeCustomerUpdate, - StripeMeter, StripeMeterId, StripePrice, StripePriceId, StripeSubscription, - StripeSubscriptionId, StripeSubscriptionItem, StripeSubscriptionItemId, StripeTaxIdCollection, - UpdateCustomerParams, UpdateSubscriptionParams, -}; - -#[derive(Debug, Clone)] -pub struct StripeCreateMeterEventCall { - pub identifier: Arc, - pub event_name: Arc, - pub value: u64, - pub stripe_customer_id: 
StripeCustomerId, - pub timestamp: Option, -} - -#[derive(Debug, Clone)] -pub struct StripeCreateCheckoutSessionCall { - pub customer: Option, - pub client_reference_id: Option, - pub mode: Option, - pub line_items: Option>, - pub payment_method_collection: Option, - pub subscription_data: Option, - pub success_url: Option, - pub billing_address_collection: Option, - pub customer_update: Option, - pub tax_id_collection: Option, -} - -pub struct FakeStripeClient { - pub customers: Arc>>, - pub subscriptions: Arc>>, - pub update_subscription_calls: - Arc>>, - pub prices: Arc>>, - pub meters: Arc>>, - pub create_meter_event_calls: Arc>>, - pub create_checkout_session_calls: Arc>>, -} - -impl FakeStripeClient { - pub fn new() -> Self { - Self { - customers: Arc::new(Mutex::new(HashMap::default())), - subscriptions: Arc::new(Mutex::new(HashMap::default())), - update_subscription_calls: Arc::new(Mutex::new(Vec::new())), - prices: Arc::new(Mutex::new(HashMap::default())), - meters: Arc::new(Mutex::new(HashMap::default())), - create_meter_event_calls: Arc::new(Mutex::new(Vec::new())), - create_checkout_session_calls: Arc::new(Mutex::new(Vec::new())), - } - } -} - -#[async_trait] -impl StripeClient for FakeStripeClient { - async fn list_customers_by_email(&self, email: &str) -> Result> { - Ok(self - .customers - .lock() - .values() - .filter(|customer| customer.email.as_deref() == Some(email)) - .cloned() - .collect()) - } - - async fn get_customer(&self, customer_id: &StripeCustomerId) -> Result { - self.customers - .lock() - .get(customer_id) - .cloned() - .ok_or_else(|| anyhow!("no customer found for {customer_id:?}")) - } - - async fn create_customer(&self, params: CreateCustomerParams<'_>) -> Result { - let customer = StripeCustomer { - id: StripeCustomerId(format!("cus_{}", Uuid::new_v4()).into()), - email: params.email.map(|email| email.to_string()), - }; - - self.customers - .lock() - .insert(customer.id.clone(), customer.clone()); - - Ok(customer) - } - - async fn update_customer( - &self, - customer_id: &StripeCustomerId, - params: UpdateCustomerParams<'_>, - ) -> Result { - let mut customers = self.customers.lock(); - if let Some(customer) = customers.get_mut(customer_id) { - if let Some(email) = params.email { - customer.email = Some(email.to_string()); - } - Ok(customer.clone()) - } else { - Err(anyhow!("no customer found for {customer_id:?}")) - } - } - - async fn list_subscriptions_for_customer( - &self, - customer_id: &StripeCustomerId, - ) -> Result> { - let subscriptions = self - .subscriptions - .lock() - .values() - .filter(|subscription| subscription.customer == *customer_id) - .cloned() - .collect(); - - Ok(subscriptions) - } - - async fn get_subscription( - &self, - subscription_id: &StripeSubscriptionId, - ) -> Result { - self.subscriptions - .lock() - .get(subscription_id) - .cloned() - .ok_or_else(|| anyhow!("no subscription found for {subscription_id:?}")) - } - - async fn create_subscription( - &self, - params: StripeCreateSubscriptionParams, - ) -> Result { - let now = Utc::now(); - - let subscription = StripeSubscription { - id: StripeSubscriptionId(format!("sub_{}", Uuid::new_v4()).into()), - customer: params.customer, - status: stripe::SubscriptionStatus::Active, - current_period_start: now.timestamp(), - current_period_end: (now + Duration::days(30)).timestamp(), - items: params - .items - .into_iter() - .map(|item| StripeSubscriptionItem { - id: StripeSubscriptionItemId(format!("si_{}", Uuid::new_v4()).into()), - price: item - .price - .and_then(|price_id| 
self.prices.lock().get(&price_id).cloned()), - }) - .collect(), - cancel_at: None, - cancellation_details: None, - }; - - self.subscriptions - .lock() - .insert(subscription.id.clone(), subscription.clone()); - - Ok(subscription) - } - - async fn update_subscription( - &self, - subscription_id: &StripeSubscriptionId, - params: UpdateSubscriptionParams, - ) -> Result<()> { - let subscription = self.get_subscription(subscription_id).await?; - - self.update_subscription_calls - .lock() - .push((subscription.id, params)); - - Ok(()) - } - - async fn cancel_subscription(&self, subscription_id: &StripeSubscriptionId) -> Result<()> { - // TODO: Implement fake subscription cancellation. - let _ = subscription_id; - - Ok(()) - } - - async fn list_prices(&self) -> Result> { - let prices = self.prices.lock().values().cloned().collect(); - - Ok(prices) - } - - async fn list_meters(&self) -> Result> { - let meters = self.meters.lock().values().cloned().collect(); - - Ok(meters) - } - - async fn create_meter_event(&self, params: StripeCreateMeterEventParams<'_>) -> Result<()> { - self.create_meter_event_calls - .lock() - .push(StripeCreateMeterEventCall { - identifier: params.identifier.into(), - event_name: params.event_name.into(), - value: params.payload.value, - stripe_customer_id: params.payload.stripe_customer_id.clone(), - timestamp: params.timestamp, - }); - - Ok(()) - } - - async fn create_checkout_session( - &self, - params: StripeCreateCheckoutSessionParams<'_>, - ) -> Result { - self.create_checkout_session_calls - .lock() - .push(StripeCreateCheckoutSessionCall { - customer: params.customer.cloned(), - client_reference_id: params.client_reference_id.map(|id| id.to_string()), - mode: params.mode, - line_items: params.line_items, - payment_method_collection: params.payment_method_collection, - subscription_data: params.subscription_data, - success_url: params.success_url.map(|url| url.to_string()), - billing_address_collection: params.billing_address_collection, - customer_update: params.customer_update, - tax_id_collection: params.tax_id_collection, - }); - - Ok(StripeCheckoutSession { - url: Some("https://checkout.stripe.com/c/pay/cs_test_1".to_string()), - }) - } -} diff --git a/crates/collab/src/stripe_client/real_stripe_client.rs b/crates/collab/src/stripe_client/real_stripe_client.rs deleted file mode 100644 index 07c191ff30400ccbf4b73c4c84f09aa47e0fd9aa..0000000000000000000000000000000000000000 --- a/crates/collab/src/stripe_client/real_stripe_client.rs +++ /dev/null @@ -1,612 +0,0 @@ -use std::str::FromStr as _; -use std::sync::Arc; - -use anyhow::{Context as _, Result, anyhow}; -use async_trait::async_trait; -use serde::{Deserialize, Serialize}; -use stripe::{ - CancellationDetails, CancellationDetailsReason, CheckoutSession, CheckoutSessionMode, - CheckoutSessionPaymentMethodCollection, CreateCheckoutSession, CreateCheckoutSessionLineItems, - CreateCheckoutSessionSubscriptionData, CreateCheckoutSessionSubscriptionDataTrialSettings, - CreateCheckoutSessionSubscriptionDataTrialSettingsEndBehavior, - CreateCheckoutSessionSubscriptionDataTrialSettingsEndBehaviorMissingPaymentMethod, - CreateCustomer, CreateSubscriptionAutomaticTax, Customer, CustomerId, ListCustomers, Price, - PriceId, Recurring, Subscription, SubscriptionId, SubscriptionItem, SubscriptionItemId, - UpdateCustomer, UpdateSubscriptionItems, UpdateSubscriptionTrialSettings, - UpdateSubscriptionTrialSettingsEndBehavior, - UpdateSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, -}; - -use crate::stripe_client::{ - 
CreateCustomerParams, StripeAutomaticTax, StripeBillingAddressCollection, - StripeCancellationDetails, StripeCancellationDetailsReason, StripeCheckoutSession, - StripeCheckoutSessionMode, StripeCheckoutSessionPaymentMethodCollection, StripeClient, - StripeCreateCheckoutSessionLineItems, StripeCreateCheckoutSessionParams, - StripeCreateCheckoutSessionSubscriptionData, StripeCreateMeterEventParams, - StripeCreateSubscriptionParams, StripeCustomer, StripeCustomerId, StripeCustomerUpdate, - StripeCustomerUpdateAddress, StripeCustomerUpdateName, StripeCustomerUpdateShipping, - StripeMeter, StripePrice, StripePriceId, StripePriceRecurring, StripeSubscription, - StripeSubscriptionId, StripeSubscriptionItem, StripeSubscriptionItemId, - StripeSubscriptionTrialSettings, StripeSubscriptionTrialSettingsEndBehavior, - StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, StripeTaxIdCollection, - UpdateCustomerParams, UpdateSubscriptionParams, -}; - -pub struct RealStripeClient { - client: Arc, -} - -impl RealStripeClient { - pub fn new(client: Arc) -> Self { - Self { client } - } -} - -#[async_trait] -impl StripeClient for RealStripeClient { - async fn list_customers_by_email(&self, email: &str) -> Result> { - let response = Customer::list( - &self.client, - &ListCustomers { - email: Some(email), - ..Default::default() - }, - ) - .await?; - - Ok(response - .data - .into_iter() - .map(StripeCustomer::from) - .collect()) - } - - async fn get_customer(&self, customer_id: &StripeCustomerId) -> Result { - let customer_id = customer_id.try_into()?; - - let customer = Customer::retrieve(&self.client, &customer_id, &[]).await?; - - Ok(StripeCustomer::from(customer)) - } - - async fn create_customer(&self, params: CreateCustomerParams<'_>) -> Result { - let customer = Customer::create( - &self.client, - CreateCustomer { - email: params.email, - ..Default::default() - }, - ) - .await?; - - Ok(StripeCustomer::from(customer)) - } - - async fn update_customer( - &self, - customer_id: &StripeCustomerId, - params: UpdateCustomerParams<'_>, - ) -> Result { - let customer = Customer::update( - &self.client, - &customer_id.try_into()?, - UpdateCustomer { - email: params.email, - ..Default::default() - }, - ) - .await?; - - Ok(StripeCustomer::from(customer)) - } - - async fn list_subscriptions_for_customer( - &self, - customer_id: &StripeCustomerId, - ) -> Result> { - let customer_id = customer_id.try_into()?; - - let subscriptions = stripe::Subscription::list( - &self.client, - &stripe::ListSubscriptions { - customer: Some(customer_id), - status: None, - ..Default::default() - }, - ) - .await?; - - Ok(subscriptions - .data - .into_iter() - .map(StripeSubscription::from) - .collect()) - } - - async fn get_subscription( - &self, - subscription_id: &StripeSubscriptionId, - ) -> Result { - let subscription_id = subscription_id.try_into()?; - - let subscription = Subscription::retrieve(&self.client, &subscription_id, &[]).await?; - - Ok(StripeSubscription::from(subscription)) - } - - async fn create_subscription( - &self, - params: StripeCreateSubscriptionParams, - ) -> Result { - let customer_id = params.customer.try_into()?; - - let mut create_subscription = stripe::CreateSubscription::new(customer_id); - create_subscription.items = Some( - params - .items - .into_iter() - .map(|item| stripe::CreateSubscriptionItems { - price: item.price.map(|price| price.to_string()), - quantity: item.quantity, - ..Default::default() - }) - .collect(), - ); - create_subscription.automatic_tax = 
params.automatic_tax.map(Into::into); - - let subscription = Subscription::create(&self.client, create_subscription).await?; - - Ok(StripeSubscription::from(subscription)) - } - - async fn update_subscription( - &self, - subscription_id: &StripeSubscriptionId, - params: UpdateSubscriptionParams, - ) -> Result<()> { - let subscription_id = subscription_id.try_into()?; - - stripe::Subscription::update( - &self.client, - &subscription_id, - stripe::UpdateSubscription { - items: params.items.map(|items| { - items - .into_iter() - .map(|item| UpdateSubscriptionItems { - price: item.price.map(|price| price.to_string()), - ..Default::default() - }) - .collect() - }), - trial_settings: params.trial_settings.map(Into::into), - ..Default::default() - }, - ) - .await?; - - Ok(()) - } - - async fn cancel_subscription(&self, subscription_id: &StripeSubscriptionId) -> Result<()> { - let subscription_id = subscription_id.try_into()?; - - Subscription::cancel( - &self.client, - &subscription_id, - stripe::CancelSubscription { - invoice_now: None, - ..Default::default() - }, - ) - .await?; - - Ok(()) - } - - async fn list_prices(&self) -> Result> { - let response = stripe::Price::list( - &self.client, - &stripe::ListPrices { - limit: Some(100), - ..Default::default() - }, - ) - .await?; - - Ok(response.data.into_iter().map(StripePrice::from).collect()) - } - - async fn list_meters(&self) -> Result> { - #[derive(Serialize)] - struct Params { - #[serde(skip_serializing_if = "Option::is_none")] - limit: Option, - } - - let response = self - .client - .get_query::, _>( - "/billing/meters", - Params { limit: Some(100) }, - ) - .await?; - - Ok(response.data) - } - - async fn create_meter_event(&self, params: StripeCreateMeterEventParams<'_>) -> Result<()> { - #[derive(Deserialize)] - struct StripeMeterEvent { - pub identifier: String, - } - - let identifier = params.identifier; - match self - .client - .post_form::("/billing/meter_events", params) - .await - { - Ok(_event) => Ok(()), - Err(stripe::StripeError::Stripe(error)) => { - if error.http_status == 400 - && error - .message - .as_ref() - .map_or(false, |message| message.contains(identifier)) - { - Ok(()) - } else { - Err(anyhow!(stripe::StripeError::Stripe(error))) - } - } - Err(error) => Err(anyhow!("failed to create meter event: {error:?}")), - } - } - - async fn create_checkout_session( - &self, - params: StripeCreateCheckoutSessionParams<'_>, - ) -> Result { - let params = params.try_into()?; - let session = CheckoutSession::create(&self.client, params).await?; - - Ok(session.into()) - } -} - -impl From for StripeCustomerId { - fn from(value: CustomerId) -> Self { - Self(value.as_str().into()) - } -} - -impl TryFrom for CustomerId { - type Error = anyhow::Error; - - fn try_from(value: StripeCustomerId) -> Result { - Self::from_str(value.0.as_ref()).context("failed to parse Stripe customer ID") - } -} - -impl TryFrom<&StripeCustomerId> for CustomerId { - type Error = anyhow::Error; - - fn try_from(value: &StripeCustomerId) -> Result { - Self::from_str(value.0.as_ref()).context("failed to parse Stripe customer ID") - } -} - -impl From for StripeCustomer { - fn from(value: Customer) -> Self { - StripeCustomer { - id: value.id.into(), - email: value.email, - } - } -} - -impl From for StripeSubscriptionId { - fn from(value: SubscriptionId) -> Self { - Self(value.as_str().into()) - } -} - -impl TryFrom<&StripeSubscriptionId> for SubscriptionId { - type Error = anyhow::Error; - - fn try_from(value: &StripeSubscriptionId) -> Result { - 
Self::from_str(value.0.as_ref()).context("failed to parse Stripe subscription ID")
-    }
-}
-
-impl From<Subscription> for StripeSubscription {
-    fn from(value: Subscription) -> Self {
-        Self {
-            id: value.id.into(),
-            customer: value.customer.id().into(),
-            status: value.status,
-            current_period_start: value.current_period_start,
-            current_period_end: value.current_period_end,
-            items: value.items.data.into_iter().map(Into::into).collect(),
-            cancel_at: value.cancel_at,
-            cancellation_details: value.cancellation_details.map(Into::into),
-        }
-    }
-}
-
-impl From<CancellationDetails> for StripeCancellationDetails {
-    fn from(value: CancellationDetails) -> Self {
-        Self {
-            reason: value.reason.map(Into::into),
-        }
-    }
-}
-
-impl From<CancellationDetailsReason> for StripeCancellationDetailsReason {
-    fn from(value: CancellationDetailsReason) -> Self {
-        match value {
-            CancellationDetailsReason::CancellationRequested => Self::CancellationRequested,
-            CancellationDetailsReason::PaymentDisputed => Self::PaymentDisputed,
-            CancellationDetailsReason::PaymentFailed => Self::PaymentFailed,
-        }
-    }
-}
-
-impl From<SubscriptionItemId> for StripeSubscriptionItemId {
-    fn from(value: SubscriptionItemId) -> Self {
-        Self(value.as_str().into())
-    }
-}
-
-impl From<SubscriptionItem> for StripeSubscriptionItem {
-    fn from(value: SubscriptionItem) -> Self {
-        Self {
-            id: value.id.into(),
-            price: value.price.map(Into::into),
-        }
-    }
-}
-
-impl From<StripeAutomaticTax> for CreateSubscriptionAutomaticTax {
-    fn from(value: StripeAutomaticTax) -> Self {
-        Self {
-            enabled: value.enabled,
-            liability: None,
-        }
-    }
-}
-
-impl From<StripeSubscriptionTrialSettings> for UpdateSubscriptionTrialSettings {
-    fn from(value: StripeSubscriptionTrialSettings) -> Self {
-        Self {
-            end_behavior: value.end_behavior.into(),
-        }
-    }
-}
-
-impl From<StripeSubscriptionTrialSettingsEndBehavior>
-    for UpdateSubscriptionTrialSettingsEndBehavior
-{
-    fn from(value: StripeSubscriptionTrialSettingsEndBehavior) -> Self {
-        Self {
-            missing_payment_method: value.missing_payment_method.into(),
-        }
-    }
-}
-
-impl From<StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod>
-    for UpdateSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod
-{
-    fn from(value: StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod) -> Self {
-        match value {
-            StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod::Cancel => Self::Cancel,
-            StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod::CreateInvoice => {
-                Self::CreateInvoice
-            }
-            StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod::Pause => Self::Pause,
-        }
-    }
-}
-
-impl From<PriceId> for StripePriceId {
-    fn from(value: PriceId) -> Self {
-        Self(value.as_str().into())
-    }
-}
-
-impl TryFrom<StripePriceId> for PriceId {
-    type Error = anyhow::Error;
-
-    fn try_from(value: StripePriceId) -> Result<Self, Self::Error> {
-        Self::from_str(value.0.as_ref()).context("failed to parse Stripe price ID")
-    }
-}
-
-impl From<Price> for StripePrice {
-    fn from(value: Price) -> Self {
-        Self {
-            id: value.id.into(),
-            unit_amount: value.unit_amount,
-            lookup_key: value.lookup_key,
-            recurring: value.recurring.map(StripePriceRecurring::from),
-        }
-    }
-}
-
-impl From<Recurring> for StripePriceRecurring {
-    fn from(value: Recurring) -> Self {
-        Self { meter: value.meter }
-    }
-}
-
-impl<'a> TryFrom<StripeCreateCheckoutSessionParams<'a>> for CreateCheckoutSession<'a> {
-    type Error = anyhow::Error;
-
-    fn try_from(value: StripeCreateCheckoutSessionParams<'a>) -> Result<Self, Self::Error> {
-        Ok(Self {
-            customer: value
-                .customer
-                .map(|customer_id| customer_id.try_into())
-                .transpose()?,
-            client_reference_id: value.client_reference_id,
-            mode: value.mode.map(Into::into),
-            line_items: value
-                .line_items
-                .map(|line_items| line_items.into_iter().map(Into::into).collect()),
-            payment_method_collection: value.payment_method_collection.map(Into::into),
-            subscription_data: value.subscription_data.map(Into::into),
-            success_url: value.success_url,
-            billing_address_collection: value.billing_address_collection.map(Into::into),
-            customer_update: value.customer_update.map(Into::into),
-            tax_id_collection: value.tax_id_collection.map(Into::into),
-            ..Default::default()
-        })
-    }
-}
-
-impl From<StripeCheckoutSessionMode> for CheckoutSessionMode {
-    fn from(value: StripeCheckoutSessionMode) -> Self {
-        match value {
-            StripeCheckoutSessionMode::Payment => Self::Payment,
-            StripeCheckoutSessionMode::Setup => Self::Setup,
-            StripeCheckoutSessionMode::Subscription => Self::Subscription,
-        }
-    }
-}
-
-impl From<StripeCreateCheckoutSessionLineItems> for CreateCheckoutSessionLineItems {
-    fn from(value: StripeCreateCheckoutSessionLineItems) -> Self {
-        Self {
-            price: value.price,
-            quantity: value.quantity,
-            ..Default::default()
-        }
-    }
-}
-
-impl From<StripeCheckoutSessionPaymentMethodCollection> for CheckoutSessionPaymentMethodCollection {
-    fn from(value: StripeCheckoutSessionPaymentMethodCollection) -> Self {
-        match value {
-            StripeCheckoutSessionPaymentMethodCollection::Always => Self::Always,
-            StripeCheckoutSessionPaymentMethodCollection::IfRequired => Self::IfRequired,
-        }
-    }
-}
-
-impl From<StripeCreateCheckoutSessionSubscriptionData> for CreateCheckoutSessionSubscriptionData {
-    fn from(value: StripeCreateCheckoutSessionSubscriptionData) -> Self {
-        Self {
-            trial_period_days: value.trial_period_days,
-            trial_settings: value.trial_settings.map(Into::into),
-            metadata: value.metadata,
-            ..Default::default()
-        }
-    }
-}
-
-impl From<StripeSubscriptionTrialSettings> for CreateCheckoutSessionSubscriptionDataTrialSettings {
-    fn from(value: StripeSubscriptionTrialSettings) -> Self {
-        Self {
-            end_behavior: value.end_behavior.into(),
-        }
-    }
-}
-
-impl From<StripeSubscriptionTrialSettingsEndBehavior>
-    for CreateCheckoutSessionSubscriptionDataTrialSettingsEndBehavior
-{
-    fn from(value: StripeSubscriptionTrialSettingsEndBehavior) -> Self {
-        Self {
-            missing_payment_method: value.missing_payment_method.into(),
-        }
-    }
-}
-
-impl From<StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod>
-    for CreateCheckoutSessionSubscriptionDataTrialSettingsEndBehaviorMissingPaymentMethod
-{
-    fn from(value: StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod) -> Self {
-        match value {
-            StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod::Cancel => Self::Cancel,
-            StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod::CreateInvoice => {
-                Self::CreateInvoice
-            }
-            StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod::Pause => Self::Pause,
-        }
-    }
-}
-
-impl From<CheckoutSession> for StripeCheckoutSession {
-    fn from(value: CheckoutSession) -> Self {
-        Self { url: value.url }
-    }
-}
-
-impl From<StripeBillingAddressCollection> for stripe::CheckoutSessionBillingAddressCollection {
-    fn from(value: StripeBillingAddressCollection) -> Self {
-        match value {
-            StripeBillingAddressCollection::Auto => {
-                stripe::CheckoutSessionBillingAddressCollection::Auto
-            }
-            StripeBillingAddressCollection::Required => {
-                stripe::CheckoutSessionBillingAddressCollection::Required
-            }
-        }
-    }
-}
-
-impl From<StripeCustomerUpdateAddress> for stripe::CreateCheckoutSessionCustomerUpdateAddress {
-    fn from(value: StripeCustomerUpdateAddress) -> Self {
-        match value {
-            StripeCustomerUpdateAddress::Auto => {
-                stripe::CreateCheckoutSessionCustomerUpdateAddress::Auto
-            }
-            StripeCustomerUpdateAddress::Never => {
-                stripe::CreateCheckoutSessionCustomerUpdateAddress::Never
-            }
-        }
-    }
-}
-
-impl From<StripeCustomerUpdateName> for stripe::CreateCheckoutSessionCustomerUpdateName {
-    fn from(value: StripeCustomerUpdateName) -> Self {
-        match value {
-            StripeCustomerUpdateName::Auto => stripe::CreateCheckoutSessionCustomerUpdateName::Auto,
-            StripeCustomerUpdateName::Never => {
-                stripe::CreateCheckoutSessionCustomerUpdateName::Never
-            }
-        }
-    }
-}
-
-impl From<StripeCustomerUpdateShipping> for stripe::CreateCheckoutSessionCustomerUpdateShipping {
-    fn from(value: StripeCustomerUpdateShipping) -> Self {
-        match value {
-            StripeCustomerUpdateShipping::Auto => {
-                stripe::CreateCheckoutSessionCustomerUpdateShipping::Auto
-            }
-            StripeCustomerUpdateShipping::Never => {
-                stripe::CreateCheckoutSessionCustomerUpdateShipping::Never
-            }
-        }
-    }
-}
-
-impl From<StripeCustomerUpdate> for stripe::CreateCheckoutSessionCustomerUpdate {
-    fn from(value: StripeCustomerUpdate) -> Self {
-        stripe::CreateCheckoutSessionCustomerUpdate {
-            address: value.address.map(Into::into),
-            name: value.name.map(Into::into),
-            shipping: value.shipping.map(Into::into),
-        }
-    }
-}
-
-impl From<StripeTaxIdCollection> for stripe::CreateCheckoutSessionTaxIdCollection {
-    fn from(value: StripeTaxIdCollection) -> Self {
-        stripe::CreateCheckoutSessionTaxIdCollection {
-            enabled: value.enabled,
-        }
-    }
-}
diff --git a/crates/collab/src/tests.rs b/crates/collab/src/tests.rs
index 8d5d076780733406904cd1c0431d56d6ebbc776f..ddf245b06f322b5c62ba83d56f05fbca65e2ba9d 100644
--- a/crates/collab/src/tests.rs
+++ b/crates/collab/src/tests.rs
@@ -8,7 +8,6 @@ mod channel_buffer_tests;
 mod channel_guest_tests;
 mod channel_message_tests;
 mod channel_tests;
-// mod debug_panel_tests;
 mod editor_tests;
 mod following_tests;
 mod git_tests;
@@ -18,7 +17,6 @@ mod random_channel_buffer_tests;
 mod random_project_collaboration_tests;
 mod randomized_test_helpers;
 mod remote_editing_collaboration_tests;
-mod stripe_billing_tests;
 mod test_server;
 
 use language::{Language, LanguageConfig, LanguageMatcher, tree_sitter_rust};
diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs
index 7b95fdd45803554492e2541a83e1ede526e11753..59d66f1821e60ecbf3a7550c1385fa6de7ae047d 100644
--- a/crates/collab/src/tests/editor_tests.rs
+++ b/crates/collab/src/tests/editor_tests.rs
@@ -15,13 +15,14 @@ use editor::{
     },
 };
 use fs::Fs;
-use futures::{StreamExt, lock::Mutex};
+use futures::{SinkExt, StreamExt, channel::mpsc, lock::Mutex};
 use gpui::{App, Rgba, TestAppContext, UpdateGlobal, VisualContext, VisualTestContext};
 use indoc::indoc;
 use language::{
     FakeLspAdapter,
     language_settings::{AllLanguageSettings, InlayHintSettings},
 };
+use lsp::LSP_REQUEST_TIMEOUT;
 use project::{
     ProjectPath, SERVER_PROGRESS_THROTTLE_TIMEOUT,
     lsp_store::lsp_ext_command::{ExpandedMacro, LspExtExpandMacro},
@@ -1017,6 +1018,211 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
     })
 }
 
+#[gpui::test]
+async fn test_slow_lsp_server(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
+    let mut server = TestServer::start(cx_a.executor()).await;
+    let client_a = server.create_client(cx_a, "user_a").await;
+    let client_b = server.create_client(cx_b, "user_b").await;
+    server
+        .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
+        .await;
+    let active_call_a = cx_a.read(ActiveCall::global);
+    cx_b.update(editor::init);
+
+    let command_name = "test_command";
+    let capabilities = lsp::ServerCapabilities {
+        code_lens_provider: Some(lsp::CodeLensOptions {
+            resolve_provider: None,
+        }),
+        execute_command_provider: Some(lsp::ExecuteCommandOptions {
+            commands: vec![command_name.to_string()],
+            ..lsp::ExecuteCommandOptions::default()
+        }),
+        ..lsp::ServerCapabilities::default()
+    };
+    client_a.language_registry().add(rust_lang());
+    let mut fake_language_servers = client_a.language_registry().register_fake_lsp(
+        "Rust",
+        FakeLspAdapter {
+            capabilities: capabilities.clone(),
+ ..FakeLspAdapter::default() + }, + ); + client_b.language_registry().add(rust_lang()); + client_b.language_registry().register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + capabilities, + ..FakeLspAdapter::default() + }, + ); + + client_a + .fs() + .insert_tree( + path!("/dir"), + json!({ + "one.rs": "const ONE: usize = 1;" + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project(path!("/dir"), cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.join_remote_project(project_id, cx_b).await; + + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + let editor_b = workspace_b + .update_in(cx_b, |workspace, window, cx| { + workspace.open_path((worktree_id, "one.rs"), None, true, window, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + let (lsp_store_b, buffer_b) = editor_b.update(cx_b, |editor, cx| { + let lsp_store = editor.project().unwrap().read(cx).lsp_store(); + let buffer = editor.buffer().read(cx).as_singleton().unwrap(); + (lsp_store, buffer) + }); + let fake_language_server = fake_language_servers.next().await.unwrap(); + cx_a.run_until_parked(); + cx_b.run_until_parked(); + + let long_request_time = LSP_REQUEST_TIMEOUT / 2; + let (request_started_tx, mut request_started_rx) = mpsc::unbounded(); + let requests_started = Arc::new(AtomicUsize::new(0)); + let requests_completed = Arc::new(AtomicUsize::new(0)); + let _lens_requests = fake_language_server + .set_request_handler::({ + let request_started_tx = request_started_tx.clone(); + let requests_started = requests_started.clone(); + let requests_completed = requests_completed.clone(); + move |params, cx| { + let mut request_started_tx = request_started_tx.clone(); + let requests_started = requests_started.clone(); + let requests_completed = requests_completed.clone(); + async move { + assert_eq!( + params.text_document.uri.as_str(), + uri!("file:///dir/one.rs") + ); + requests_started.fetch_add(1, atomic::Ordering::Release); + request_started_tx.send(()).await.unwrap(); + cx.background_executor().timer(long_request_time).await; + let i = requests_completed.fetch_add(1, atomic::Ordering::Release) + 1; + Ok(Some(vec![lsp::CodeLens { + range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 9)), + command: Some(lsp::Command { + title: format!("LSP Command {i}"), + command: command_name.to_string(), + arguments: None, + }), + data: None, + }])) + } + } + }); + + // Move cursor to a location, this should trigger the code lens call. 
+ editor_b.update_in(cx_b, |editor, window, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges([7..7]) + }); + }); + let () = request_started_rx.next().await.unwrap(); + assert_eq!( + requests_started.load(atomic::Ordering::Acquire), + 1, + "Selection change should have initiated the first request" + ); + assert_eq!( + requests_completed.load(atomic::Ordering::Acquire), + 0, + "Slow requests should be running still" + ); + let _first_task = lsp_store_b.update(cx_b, |lsp_store, cx| { + lsp_store + .forget_code_lens_task(buffer_b.read(cx).remote_id()) + .expect("Should have the fetch task started") + }); + + editor_b.update_in(cx_b, |editor, window, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges([1..1]) + }); + }); + let () = request_started_rx.next().await.unwrap(); + assert_eq!( + requests_started.load(atomic::Ordering::Acquire), + 2, + "Selection change should have initiated the second request" + ); + assert_eq!( + requests_completed.load(atomic::Ordering::Acquire), + 0, + "Slow requests should be running still" + ); + let _second_task = lsp_store_b.update(cx_b, |lsp_store, cx| { + lsp_store + .forget_code_lens_task(buffer_b.read(cx).remote_id()) + .expect("Should have the fetch task started for the 2nd time") + }); + + editor_b.update_in(cx_b, |editor, window, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges([2..2]) + }); + }); + let () = request_started_rx.next().await.unwrap(); + assert_eq!( + requests_started.load(atomic::Ordering::Acquire), + 3, + "Selection change should have initiated the third request" + ); + assert_eq!( + requests_completed.load(atomic::Ordering::Acquire), + 0, + "Slow requests should be running still" + ); + + _first_task.await.unwrap(); + _second_task.await.unwrap(); + cx_b.run_until_parked(); + assert_eq!( + requests_started.load(atomic::Ordering::Acquire), + 3, + "No selection changes should trigger no more code lens requests" + ); + assert_eq!( + requests_completed.load(atomic::Ordering::Acquire), + 3, + "After enough time, all 3 LSP requests should have been served by the language server" + ); + let resulting_lens_actions = editor_b + .update(cx_b, |editor, cx| { + let lsp_store = editor.project().unwrap().read(cx).lsp_store(); + lsp_store.update(cx, |lsp_store, cx| { + lsp_store.code_lens_actions(&buffer_b, cx) + }) + }) + .await + .unwrap() + .unwrap(); + assert_eq!( + resulting_lens_actions.len(), + 1, + "Should have fetched one code lens action, but got: {resulting_lens_actions:?}" + ); + assert_eq!( + resulting_lens_actions.first().unwrap().lsp_action.title(), + "LSP Command 3", + "Only the final code lens action should be in the data" + ) +} + #[gpui::test(iterations = 10)] async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { let mut server = TestServer::start(cx_a.executor()).await; @@ -2908,7 +3114,7 @@ async fn test_lsp_pull_diagnostics( { assert!( - diagnostics_pulls_result_ids.lock().await.len() > 0, + !diagnostics_pulls_result_ids.lock().await.is_empty(), "Initial diagnostics pulls should report None at least" ); assert_eq!( @@ -3593,7 +3799,7 @@ async fn test_add_breakpoints(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte let abs_path = project_a.read_with(cx_a, |project, cx| { project .absolute_path(&project_path, cx) - .map(|path_buf| Arc::from(path_buf.to_owned())) + .map(Arc::from) .unwrap() }); @@ -3647,20 +3853,16 @@ async fn 
test_add_breakpoints(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte let breakpoints_a = editor_a.update(cx_a, |editor, cx| { editor .breakpoint_store() - .clone() .unwrap() .read(cx) .all_source_breakpoints(cx) - .clone() }); let breakpoints_b = editor_b.update(cx_b, |editor, cx| { editor .breakpoint_store() - .clone() .unwrap() .read(cx) .all_source_breakpoints(cx) - .clone() }); assert_eq!(1, breakpoints_a.len()); @@ -3680,20 +3882,16 @@ async fn test_add_breakpoints(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte let breakpoints_a = editor_a.update(cx_a, |editor, cx| { editor .breakpoint_store() - .clone() .unwrap() .read(cx) .all_source_breakpoints(cx) - .clone() }); let breakpoints_b = editor_b.update(cx_b, |editor, cx| { editor .breakpoint_store() - .clone() .unwrap() .read(cx) .all_source_breakpoints(cx) - .clone() }); assert_eq!(1, breakpoints_a.len()); @@ -3713,20 +3911,16 @@ async fn test_add_breakpoints(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte let breakpoints_a = editor_a.update(cx_a, |editor, cx| { editor .breakpoint_store() - .clone() .unwrap() .read(cx) .all_source_breakpoints(cx) - .clone() }); let breakpoints_b = editor_b.update(cx_b, |editor, cx| { editor .breakpoint_store() - .clone() .unwrap() .read(cx) .all_source_breakpoints(cx) - .clone() }); assert_eq!(1, breakpoints_a.len()); @@ -3746,20 +3940,16 @@ async fn test_add_breakpoints(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte let breakpoints_a = editor_a.update(cx_a, |editor, cx| { editor .breakpoint_store() - .clone() .unwrap() .read(cx) .all_source_breakpoints(cx) - .clone() }); let breakpoints_b = editor_b.update(cx_b, |editor, cx| { editor .breakpoint_store() - .clone() .unwrap() .read(cx) .all_source_breakpoints(cx) - .clone() }); assert_eq!(0, breakpoints_a.len()); diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 5a2c40b890cfe32510347c33a1257af4cbea0768..5c732530480a14ab28e231aa0fae1b79ef2703fb 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -3208,7 +3208,7 @@ async fn test_fs_operations( }) .await .unwrap() - .to_included() + .into_included() .unwrap(); worktree_a.read_with(cx_a, |worktree, _| { @@ -3237,7 +3237,7 @@ async fn test_fs_operations( }) .await .unwrap() - .to_included() + .into_included() .unwrap(); worktree_a.read_with(cx_a, |worktree, _| { @@ -3266,7 +3266,7 @@ async fn test_fs_operations( }) .await .unwrap() - .to_included() + .into_included() .unwrap(); worktree_a.read_with(cx_a, |worktree, _| { @@ -3295,7 +3295,7 @@ async fn test_fs_operations( }) .await .unwrap() - .to_included() + .into_included() .unwrap(); project_b @@ -3304,7 +3304,7 @@ async fn test_fs_operations( }) .await .unwrap() - .to_included() + .into_included() .unwrap(); project_b @@ -3313,7 +3313,7 @@ async fn test_fs_operations( }) .await .unwrap() - .to_included() + .into_included() .unwrap(); worktree_a.read_with(cx_a, |worktree, _| { @@ -4850,6 +4850,7 @@ async fn test_definition( let definitions_1 = project_b .update(cx_b, |p, cx| p.definitions(&buffer_b, 23, cx)) .await + .unwrap() .unwrap(); cx_b.read(|cx| { assert_eq!( @@ -4885,6 +4886,7 @@ async fn test_definition( let definitions_2 = project_b .update(cx_b, |p, cx| p.definitions(&buffer_b, 33, cx)) .await + .unwrap() .unwrap(); cx_b.read(|cx| { assert_eq!(definitions_2.len(), 1); @@ -4922,6 +4924,7 @@ async fn test_definition( let type_definitions = project_b .update(cx_b, |p, cx| p.type_definitions(&buffer_b, 7, cx)) 
.await + .unwrap() .unwrap(); cx_b.read(|cx| { assert_eq!( @@ -4970,7 +4973,7 @@ async fn test_references( "Rust", FakeLspAdapter { name: "my-fake-lsp-adapter", - capabilities: capabilities, + capabilities, ..FakeLspAdapter::default() }, ); @@ -5060,7 +5063,7 @@ async fn test_references( ]))) .unwrap(); - let references = references.await.unwrap(); + let references = references.await.unwrap().unwrap(); executor.run_until_parked(); project_b.read_with(cx_b, |project, cx| { // User is informed that a request is no longer pending. @@ -5104,7 +5107,7 @@ async fn test_references( lsp_response_tx .unbounded_send(Err(anyhow!("can't find references"))) .unwrap(); - assert_eq!(references.await.unwrap(), []); + assert_eq!(references.await.unwrap().unwrap(), []); // User is informed that the request is no longer pending. executor.run_until_parked(); @@ -5505,7 +5508,8 @@ async fn test_lsp_hover( // Request hover information as the guest. let mut hovers = project_b .update(cx_b, |p, cx| p.hover(&buffer_b, 22, cx)) - .await; + .await + .unwrap(); assert_eq!( hovers.len(), 2, @@ -5764,7 +5768,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it( definitions = project_b.update(cx_b, |p, cx| p.definitions(&buffer_b1, 23, cx)); } - let definitions = definitions.await.unwrap(); + let definitions = definitions.await.unwrap().unwrap(); assert_eq!( definitions.len(), 1, diff --git a/crates/collab/src/tests/random_channel_buffer_tests.rs b/crates/collab/src/tests/random_channel_buffer_tests.rs index c283a9fcd1741ad62c15e4c514df0a81ffb42062..6fcd6d75cd0d827296f555bfa54c18dac518a3be 100644 --- a/crates/collab/src/tests/random_channel_buffer_tests.rs +++ b/crates/collab/src/tests/random_channel_buffer_tests.rs @@ -266,7 +266,7 @@ impl RandomizedTest for RandomChannelBufferTest { "client {user_id} has different text than client {prev_user_id} for channel {channel_name}", ); } else { - prev_text = Some((user_id, text.clone())); + prev_text = Some((user_id, text)); } // Assert that all clients and the server agree about who is present in the diff --git a/crates/collab/src/tests/random_project_collaboration_tests.rs b/crates/collab/src/tests/random_project_collaboration_tests.rs index 4d94d041b9b5ca5e6d0ed3bd1f54b5f86f224c56..ac5c4c54ca570bf5545505419cb20a021ca97202 100644 --- a/crates/collab/src/tests/random_project_collaboration_tests.rs +++ b/crates/collab/src/tests/random_project_collaboration_tests.rs @@ -304,7 +304,7 @@ impl RandomizedTest for ProjectCollaborationTest { let worktree = worktree.read(cx); worktree.is_visible() && worktree.entries(false, 0).any(|e| e.is_file()) - && worktree.root_entry().map_or(false, |e| e.is_dir()) + && worktree.root_entry().is_some_and(|e| e.is_dir()) }) .choose(rng) }); @@ -643,7 +643,7 @@ impl RandomizedTest for ProjectCollaborationTest { ); let project = project.await?; - client.dev_server_projects_mut().push(project.clone()); + client.dev_server_projects_mut().push(project); } ClientOperation::CreateWorktreeEntry { @@ -1162,8 +1162,8 @@ impl RandomizedTest for ProjectCollaborationTest { Some((project, cx)) }); - if !guest_project.is_disconnected(cx) { - if let Some((host_project, host_cx)) = host_project { + if !guest_project.is_disconnected(cx) + && let Some((host_project, host_cx)) = host_project { let host_worktree_snapshots = host_project.read_with(host_cx, |host_project, cx| { host_project @@ -1235,7 +1235,6 @@ impl RandomizedTest for ProjectCollaborationTest { ); } } - } for buffer in guest_project.opened_buffers(cx) { let buffer = buffer.read(cx); 
diff --git a/crates/collab/src/tests/randomized_test_helpers.rs b/crates/collab/src/tests/randomized_test_helpers.rs index cabf10cfbcec2e13a322ed742745b410ba760fd9..d6c299a6a9ed4e0439573e9b33fabe8ff122963d 100644 --- a/crates/collab/src/tests/randomized_test_helpers.rs +++ b/crates/collab/src/tests/randomized_test_helpers.rs @@ -198,11 +198,11 @@ pub async fn run_randomized_test( } pub fn save_randomized_test_plan() { - if let Some(serialize_plan) = LAST_PLAN.lock().take() { - if let Some(path) = plan_save_path() { - eprintln!("saved test plan to path {:?}", path); - std::fs::write(path, serialize_plan()).unwrap(); - } + if let Some(serialize_plan) = LAST_PLAN.lock().take() + && let Some(path) = plan_save_path() + { + eprintln!("saved test plan to path {:?}", path); + std::fs::write(path, serialize_plan()).unwrap(); } } @@ -290,10 +290,9 @@ impl TestPlan { if let StoredOperation::Client { user_id, batch_id, .. } = operation + && batch_id == current_batch_id { - if batch_id == current_batch_id { - return Some(user_id); - } + return Some(user_id); } None })); @@ -366,10 +365,9 @@ impl TestPlan { }, applied, ) = stored_operation + && user_id == ¤t_user_id { - if user_id == ¤t_user_id { - return Some((operation.clone(), applied.clone())); - } + return Some((operation.clone(), applied.clone())); } } None @@ -550,11 +548,11 @@ impl TestPlan { .unwrap(); let pool = server.connection_pool.lock(); for contact in contacts { - if let db::Contact::Accepted { user_id, busy, .. } = contact { - if user_id == removed_user_id { - assert!(!pool.is_user_online(user_id)); - assert!(!busy); - } + if let db::Contact::Accepted { user_id, busy, .. } = contact + && user_id == removed_user_id + { + assert!(!pool.is_user_online(user_id)); + assert!(!busy); } } } diff --git a/crates/collab/src/tests/stripe_billing_tests.rs b/crates/collab/src/tests/stripe_billing_tests.rs deleted file mode 100644 index bb84bedfcfc1fb4f95724f60bbd80707b12c215a..0000000000000000000000000000000000000000 --- a/crates/collab/src/tests/stripe_billing_tests.rs +++ /dev/null @@ -1,123 +0,0 @@ -use std::sync::Arc; - -use pretty_assertions::assert_eq; - -use crate::stripe_billing::StripeBilling; -use crate::stripe_client::{FakeStripeClient, StripePrice, StripePriceId, StripePriceRecurring}; - -fn make_stripe_billing() -> (StripeBilling, Arc) { - let stripe_client = Arc::new(FakeStripeClient::new()); - let stripe_billing = StripeBilling::test(stripe_client.clone()); - - (stripe_billing, stripe_client) -} - -#[gpui::test] -async fn test_initialize() { - let (stripe_billing, stripe_client) = make_stripe_billing(); - - // Add test prices - let price1 = StripePrice { - id: StripePriceId("price_1".into()), - unit_amount: Some(1_000), - lookup_key: Some("zed-pro".to_string()), - recurring: None, - }; - let price2 = StripePrice { - id: StripePriceId("price_2".into()), - unit_amount: Some(0), - lookup_key: Some("zed-free".to_string()), - recurring: None, - }; - let price3 = StripePrice { - id: StripePriceId("price_3".into()), - unit_amount: Some(500), - lookup_key: None, - recurring: Some(StripePriceRecurring { - meter: Some("meter_1".to_string()), - }), - }; - stripe_client - .prices - .lock() - .insert(price1.id.clone(), price1); - stripe_client - .prices - .lock() - .insert(price2.id.clone(), price2); - stripe_client - .prices - .lock() - .insert(price3.id.clone(), price3); - - // Initialize the billing system - stripe_billing.initialize().await.unwrap(); - - // Verify that prices can be found by lookup key - let zed_pro_price_id = 
stripe_billing.zed_pro_price_id().await.unwrap(); - assert_eq!(zed_pro_price_id.to_string(), "price_1"); - - let zed_free_price_id = stripe_billing.zed_free_price_id().await.unwrap(); - assert_eq!(zed_free_price_id.to_string(), "price_2"); - - // Verify that a price can be found by lookup key - let zed_pro_price = stripe_billing - .find_price_by_lookup_key("zed-pro") - .await - .unwrap(); - assert_eq!(zed_pro_price.id.to_string(), "price_1"); - assert_eq!(zed_pro_price.unit_amount, Some(1_000)); - - // Verify that finding a non-existent lookup key returns an error - let result = stripe_billing - .find_price_by_lookup_key("non-existent") - .await; - assert!(result.is_err()); -} - -#[gpui::test] -async fn test_find_or_create_customer_by_email() { - let (stripe_billing, stripe_client) = make_stripe_billing(); - - // Create a customer with an email that doesn't yet correspond to a customer. - { - let email = "user@example.com"; - - let customer_id = stripe_billing - .find_or_create_customer_by_email(Some(email)) - .await - .unwrap(); - - let customer = stripe_client - .customers - .lock() - .get(&customer_id) - .unwrap() - .clone(); - assert_eq!(customer.email.as_deref(), Some(email)); - } - - // Create a customer with an email that corresponds to an existing customer. - { - let email = "user2@example.com"; - - let existing_customer_id = stripe_billing - .find_or_create_customer_by_email(Some(email)) - .await - .unwrap(); - - let customer_id = stripe_billing - .find_or_create_customer_by_email(Some(email)) - .await - .unwrap(); - assert_eq!(customer_id, existing_customer_id); - - let customer = stripe_client - .customers - .lock() - .get(&customer_id) - .unwrap() - .clone(); - assert_eq!(customer.email.as_deref(), Some(email)); - } -} diff --git a/crates/collab/src/tests/test_server.rs b/crates/collab/src/tests/test_server.rs index f5a0e8ea81f0befbb3bae44ab516a7b8f4b04b52..fd5e3eefc158034e8b15dc3fd7e401b1041fe08e 100644 --- a/crates/collab/src/tests/test_server.rs +++ b/crates/collab/src/tests/test_server.rs @@ -1,4 +1,3 @@ -use crate::stripe_client::FakeStripeClient; use crate::{ AppState, Config, db::{NewUserParams, UserId, tests::TestDb}, @@ -371,8 +370,8 @@ impl TestServer { let client = TestClient { app_state, username: name.to_string(), - channel_store: cx.read(ChannelStore::global).clone(), - notification_store: cx.read(NotificationStore::global).clone(), + channel_store: cx.read(ChannelStore::global), + notification_store: cx.read(NotificationStore::global), state: Default::default(), }; client.wait_for_current_user(cx).await; @@ -566,12 +565,8 @@ impl TestServer { ) -> Arc { Arc::new(AppState { db: test_db.db().clone(), - llm_db: None, livekit_client: Some(Arc::new(livekit_test_server.create_api_client())), blob_store_client: None, - real_stripe_client: None, - stripe_client: Some(Arc::new(FakeStripeClient::new())), - stripe_billing: None, executor, kinesis_client: None, config: Config { @@ -608,7 +603,6 @@ impl TestServer { auto_join_channel_id: None, migrations_path: None, seed_path: None, - stripe_api_key: None, supermaven_admin_api_key: None, user_backfiller_github_access_token: None, kinesis_region: None, @@ -903,7 +897,7 @@ impl TestClient { let window = cx.update(|cx| cx.active_window().unwrap().downcast::().unwrap()); let entity = window.root(cx).unwrap(); - let cx = VisualTestContext::from_window(*window.deref(), cx).as_mut(); + let cx = VisualTestContext::from_window(*window.deref(), cx).into_mut(); // it might be nice to try and cleanup these at the end of each test. 
(entity, cx) } diff --git a/crates/collab/src/user_backfiller.rs b/crates/collab/src/user_backfiller.rs index 71b99a3d4c62560597ce47de926816e741507e44..569a298c9cd5bca6c65e5b7a39b45a784635ad35 100644 --- a/crates/collab/src/user_backfiller.rs +++ b/crates/collab/src/user_backfiller.rs @@ -130,17 +130,17 @@ impl UserBackfiller { .and_then(|value| value.parse::().ok()) .and_then(|value| DateTime::from_timestamp(value, 0)); - if rate_limit_remaining == Some(0) { - if let Some(reset_at) = rate_limit_reset { - let now = Utc::now(); - if reset_at > now { - let sleep_duration = reset_at - now; - log::info!( - "rate limit reached. Sleeping for {} seconds", - sleep_duration.num_seconds() - ); - self.executor.sleep(sleep_duration.to_std().unwrap()).await; - } + if rate_limit_remaining == Some(0) + && let Some(reset_at) = rate_limit_reset + { + let now = Utc::now(); + if reset_at > now { + let sleep_duration = reset_at - now; + log::info!( + "rate limit reached. Sleeping for {} seconds", + sleep_duration.num_seconds() + ); + self.executor.sleep(sleep_duration.to_std().unwrap()).await; } } diff --git a/crates/collab_ui/src/channel_view.rs b/crates/collab_ui/src/channel_view.rs index b86d72d92faede8c52e40a8e209fde5bf1ea9f0b..61b3e05e48a9fe3da35957b05fcd7dbf7206f146 100644 --- a/crates/collab_ui/src/channel_view.rs +++ b/crates/collab_ui/src/channel_view.rs @@ -66,7 +66,7 @@ impl ChannelView { channel_id, link_position, pane.clone(), - workspace.clone(), + workspace, window, cx, ); @@ -107,43 +107,32 @@ impl ChannelView { .find(|view| view.read(cx).channel_buffer.read(cx).remote_id(cx) == buffer_id); // If this channel buffer is already open in this pane, just return it. - if let Some(existing_view) = existing_view.clone() { - if existing_view.read(cx).channel_buffer == channel_view.read(cx).channel_buffer - { - if let Some(link_position) = link_position { - existing_view.update(cx, |channel_view, cx| { - channel_view.focus_position_from_link( - link_position, - true, - window, - cx, - ) - }); - } - return existing_view; + if let Some(existing_view) = existing_view.clone() + && existing_view.read(cx).channel_buffer == channel_view.read(cx).channel_buffer + { + if let Some(link_position) = link_position { + existing_view.update(cx, |channel_view, cx| { + channel_view.focus_position_from_link(link_position, true, window, cx) + }); } + return existing_view; } // If the pane contained a disconnected view for this channel buffer, // replace that. 
- if let Some(existing_item) = existing_view { - if let Some(ix) = pane.index_for_item(&existing_item) { - pane.close_item_by_id( - existing_item.entity_id(), - SaveIntent::Skip, - window, - cx, - ) + if let Some(existing_item) = existing_view + && let Some(ix) = pane.index_for_item(&existing_item) + { + pane.close_item_by_id(existing_item.entity_id(), SaveIntent::Skip, window, cx) .detach(); - pane.add_item( - Box::new(channel_view.clone()), - true, - true, - Some(ix), - window, - cx, - ); - } + pane.add_item( + Box::new(channel_view.clone()), + true, + true, + Some(ix), + window, + cx, + ); } if let Some(link_position) = link_position { @@ -259,26 +248,21 @@ impl ChannelView { .editor .update(cx, |editor, cx| editor.snapshot(window, cx)); - if let Some(outline) = snapshot.buffer_snapshot.outline(None) { - if let Some(item) = outline + if let Some(outline) = snapshot.buffer_snapshot.outline(None) + && let Some(item) = outline .items .iter() .find(|item| &Channel::slug(&item.text).to_lowercase() == &position) - { - self.editor.update(cx, |editor, cx| { - editor.change_selections( - SelectionEffects::scroll(Autoscroll::focused()), - window, - cx, - |s| { - s.replace_cursors_with(|map| { - vec![item.range.start.to_display_point(map)] - }) - }, - ) - }); - return; - } + { + self.editor.update(cx, |editor, cx| { + editor.change_selections( + SelectionEffects::scroll(Autoscroll::focused()), + window, + cx, + |s| s.replace_cursors_with(|map| vec![item.range.start.to_display_point(map)]), + ) + }); + return; } if !first_attempt { diff --git a/crates/collab_ui/src/chat_panel.rs b/crates/collab_ui/src/chat_panel.rs index 51d9f003f813212d40ff8e0716c86b1439fd4de6..8aaf6c0aa21f0b677ec091880fd8be674be1d6fe 100644 --- a/crates/collab_ui/src/chat_panel.rs +++ b/crates/collab_ui/src/chat_panel.rs @@ -287,19 +287,20 @@ impl ChatPanel { } fn acknowledge_last_message(&mut self, cx: &mut Context) { - if self.active && self.is_scrolled_to_bottom { - if let Some((chat, _)) = &self.active_chat { - if let Some(channel_id) = self.channel_id(cx) { - self.last_acknowledged_message_id = self - .channel_store - .read(cx) - .last_acknowledge_message_id(channel_id); - } - - chat.update(cx, |chat, cx| { - chat.acknowledge_last_message(cx); - }); + if self.active + && self.is_scrolled_to_bottom + && let Some((chat, _)) = &self.active_chat + { + if let Some(channel_id) = self.channel_id(cx) { + self.last_acknowledged_message_id = self + .channel_store + .read(cx) + .last_acknowledge_message_id(channel_id); } + + chat.update(cx, |chat, cx| { + chat.acknowledge_last_message(cx); + }); } } @@ -405,14 +406,13 @@ impl ChatPanel { && last_message.id != this_message.id && duration_since_last_message < Duration::from_secs(5 * 60); - if let ChannelMessageId::Saved(id) = this_message.id { - if this_message + if let ChannelMessageId::Saved(id) = this_message.id + && this_message .mentions .iter() .any(|(_, user_id)| Some(*user_id) == self.client.user_id()) - { - active_chat.acknowledge_message(id); - } + { + active_chat.acknowledge_message(id); } (this_message, is_continuation_from_previous, is_admin) @@ -674,7 +674,7 @@ impl ChatPanel { }) }) .when_some(message_id, |el, message_id| { - let this = cx.entity().clone(); + let this = cx.entity(); el.child( self.render_popover_button( @@ -871,34 +871,33 @@ impl ChatPanel { scroll_to_message_id.or(this.last_acknowledged_message_id) })?; - if let Some(message_id) = scroll_to_message_id { - if let Some(item_ix) = + if let Some(message_id) = scroll_to_message_id + && let Some(item_ix) = 
ChannelChat::load_history_since_message(chat.clone(), message_id, cx.clone()) .await - { - this.update(cx, |this, cx| { - if let Some(highlight_message_id) = highlight_message_id { - let task = cx.spawn(async move |this, cx| { - cx.background_executor().timer(Duration::from_secs(2)).await; - this.update(cx, |this, cx| { - this.highlighted_message.take(); - cx.notify(); - }) - .ok(); - }); - - this.highlighted_message = Some((highlight_message_id, task)); - } + { + this.update(cx, |this, cx| { + if let Some(highlight_message_id) = highlight_message_id { + let task = cx.spawn(async move |this, cx| { + cx.background_executor().timer(Duration::from_secs(2)).await; + this.update(cx, |this, cx| { + this.highlighted_message.take(); + cx.notify(); + }) + .ok(); + }); - if this.active_chat.as_ref().map_or(false, |(c, _)| *c == chat) { - this.message_list.scroll_to(ListOffset { - item_ix, - offset_in_item: px(0.0), - }); - cx.notify(); - } - })?; - } + this.highlighted_message = Some((highlight_message_id, task)); + } + + if this.active_chat.as_ref().is_some_and(|(c, _)| *c == chat) { + this.message_list.scroll_to(ListOffset { + item_ix, + offset_in_item: px(0.0), + }); + cx.notify(); + } + })?; } Ok(()) @@ -1039,7 +1038,7 @@ impl Render for ChatPanel { .cloned(); el.when_some(reply_message, |el, reply_message| { - let user_being_replied_to = reply_message.sender.clone(); + let user_being_replied_to = reply_message.sender; el.child( h_flex() @@ -1187,7 +1186,7 @@ impl Panel for ChatPanel { let is_in_call = ActiveCall::global(cx) .read(cx) .room() - .map_or(false, |room| room.read(cx).contains_guests()); + .is_some_and(|room| room.read(cx).contains_guests()); self.active || is_in_call } diff --git a/crates/collab_ui/src/chat_panel/message_editor.rs b/crates/collab_ui/src/chat_panel/message_editor.rs index 03d39cb8ced169f59167b1a1f6e91102a268a37d..5fead5bcf10cc62dc6f60414978366cb2eac313b 100644 --- a/crates/collab_ui/src/chat_panel/message_editor.rs +++ b/crates/collab_ui/src/chat_panel/message_editor.rs @@ -241,38 +241,36 @@ impl MessageEditor { ) -> Task>> { if let Some((start_anchor, query, candidates)) = self.collect_mention_candidates(buffer, end_anchor, cx) + && !candidates.is_empty() { - if !candidates.is_empty() { - return cx.spawn(async move |_, cx| { - let completion_response = Self::completions_for_candidates( - &cx, - query.as_str(), - &candidates, - start_anchor..end_anchor, - Self::completion_for_mention, - ) - .await; - Ok(vec![completion_response]) - }); - } + return cx.spawn(async move |_, cx| { + let completion_response = Self::completions_for_candidates( + cx, + query.as_str(), + &candidates, + start_anchor..end_anchor, + Self::completion_for_mention, + ) + .await; + Ok(vec![completion_response]) + }); } if let Some((start_anchor, query, candidates)) = self.collect_emoji_candidates(buffer, end_anchor, cx) + && !candidates.is_empty() { - if !candidates.is_empty() { - return cx.spawn(async move |_, cx| { - let completion_response = Self::completions_for_candidates( - &cx, - query.as_str(), - candidates, - start_anchor..end_anchor, - Self::completion_for_emoji, - ) - .await; - Ok(vec![completion_response]) - }); - } + return cx.spawn(async move |_, cx| { + let completion_response = Self::completions_for_candidates( + cx, + query.as_str(), + candidates, + start_anchor..end_anchor, + Self::completion_for_emoji, + ) + .await; + Ok(vec![completion_response]) + }); } Task::ready(Ok(vec![CompletionResponse { @@ -399,11 +397,10 @@ impl MessageEditor { ) -> Option<(Anchor, String, &'static 
[StringMatchCandidate])> { static EMOJI_FUZZY_MATCH_CANDIDATES: LazyLock> = LazyLock::new(|| { - let emojis = emojis::iter() + emojis::iter() .flat_map(|s| s.shortcodes()) .map(|emoji| StringMatchCandidate::new(0, emoji)) - .collect::>(); - emojis + .collect::>() }); let end_offset = end_anchor.to_offset(buffer.read(cx)); @@ -474,18 +471,17 @@ impl MessageEditor { for range in ranges { text.clear(); text.extend(buffer.text_for_range(range.clone())); - if let Some(username) = text.strip_prefix('@') { - if let Some(user) = this + if let Some(username) = text.strip_prefix('@') + && let Some(user) = this .user_store .read(cx) .cached_user_by_github_login(username) - { - let start = multi_buffer.anchor_after(range.start); - let end = multi_buffer.anchor_after(range.end); + { + let start = multi_buffer.anchor_after(range.start); + let end = multi_buffer.anchor_after(range.end); - mentioned_user_ids.push(user.id); - anchor_ranges.push(start..end); - } + mentioned_user_ids.push(user.id); + anchor_ranges.push(start..end); } } diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index 51e4ff8965ee253f2d6bcb858ec6881b8a1cadc4..cd37549783118910048d1a9bc85adcd0e593209b 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -95,7 +95,7 @@ pub fn init(cx: &mut App) { .and_then(|room| room.read(cx).channel_id()); if let Some(channel_id) = channel_id { - let workspace = cx.entity().clone(); + let workspace = cx.entity(); window.defer(cx, move |window, cx| { ChannelView::open(channel_id, None, workspace, window, cx) .detach_and_log_err(cx) @@ -311,10 +311,10 @@ impl CollabPanel { window, |this: &mut Self, _, event, window, cx| { if let editor::EditorEvent::Blurred = event { - if let Some(state) = &this.channel_editing_state { - if state.pending_name().is_some() { - return; - } + if let Some(state) = &this.channel_editing_state + && state.pending_name().is_some() + { + return; } this.take_editing_state(window, cx); this.update_entries(false, cx); @@ -491,11 +491,11 @@ impl CollabPanel { if !self.collapsed_sections.contains(&Section::ActiveCall) { let room = room.read(cx); - if query.is_empty() { - if let Some(channel_id) = room.channel_id() { - self.entries.push(ListEntry::ChannelNotes { channel_id }); - self.entries.push(ListEntry::ChannelChat { channel_id }); - } + if query.is_empty() + && let Some(channel_id) = room.channel_id() + { + self.entries.push(ListEntry::ChannelNotes { channel_id }); + self.entries.push(ListEntry::ChannelChat { channel_id }); } // Populate the active user. @@ -639,10 +639,10 @@ impl CollabPanel { &Default::default(), executor.clone(), )); - if let Some(state) = &self.channel_editing_state { - if matches!(state, ChannelEditingState::Create { location: None, .. }) { - self.entries.push(ListEntry::ChannelEditor { depth: 0 }); - } + if let Some(state) = &self.channel_editing_state + && matches!(state, ChannelEditingState::Create { location: None, .. 
}) + { + self.entries.push(ListEntry::ChannelEditor { depth: 0 }); } let mut collapse_depth = None; for mat in matches { @@ -664,9 +664,7 @@ impl CollabPanel { let has_children = channel_store .channel_at_index(mat.candidate_id + 1) - .map_or(false, |next_channel| { - next_channel.parent_path.ends_with(&[channel.id]) - }); + .is_some_and(|next_channel| next_channel.parent_path.ends_with(&[channel.id])); match &self.channel_editing_state { Some(ChannelEditingState::Create { @@ -1125,7 +1123,7 @@ impl CollabPanel { } fn has_subchannels(&self, ix: usize) -> bool { - self.entries.get(ix).map_or(false, |entry| { + self.entries.get(ix).is_some_and(|entry| { if let ListEntry::Channel { has_children, .. } = entry { *has_children } else { @@ -1142,7 +1140,7 @@ impl CollabPanel { window: &mut Window, cx: &mut Context, ) { - let this = cx.entity().clone(); + let this = cx.entity(); if !(role == proto::ChannelRole::Guest || role == proto::ChannelRole::Talker || role == proto::ChannelRole::Member) @@ -1272,7 +1270,7 @@ impl CollabPanel { .channel_for_id(clipboard.channel_id) .map(|channel| channel.name.clone()) }); - let this = cx.entity().clone(); + let this = cx.entity(); let context_menu = ContextMenu::build(window, cx, |mut context_menu, window, cx| { if self.has_subchannels(ix) { @@ -1439,7 +1437,7 @@ impl CollabPanel { window: &mut Window, cx: &mut Context, ) { - let this = cx.entity().clone(); + let this = cx.entity(); let in_room = ActiveCall::global(cx).read(cx).room().is_some(); let context_menu = ContextMenu::build(window, cx, |mut context_menu, _, _| { @@ -1552,98 +1550,93 @@ impl CollabPanel { return; } - if let Some(selection) = self.selection { - if let Some(entry) = self.entries.get(selection) { - match entry { - ListEntry::Header(section) => match section { - Section::ActiveCall => Self::leave_call(window, cx), - Section::Channels => self.new_root_channel(window, cx), - Section::Contacts => self.toggle_contact_finder(window, cx), - Section::ContactRequests - | Section::Online - | Section::Offline - | Section::ChannelInvites => { - self.toggle_section_expanded(*section, cx); - } - }, - ListEntry::Contact { contact, calling } => { - if contact.online && !contact.busy && !calling { - self.call(contact.user.id, window, cx); - } + if let Some(selection) = self.selection + && let Some(entry) = self.entries.get(selection) + { + match entry { + ListEntry::Header(section) => match section { + Section::ActiveCall => Self::leave_call(window, cx), + Section::Channels => self.new_root_channel(window, cx), + Section::Contacts => self.toggle_contact_finder(window, cx), + Section::ContactRequests + | Section::Online + | Section::Offline + | Section::ChannelInvites => { + self.toggle_section_expanded(*section, cx); } - ListEntry::ParticipantProject { - project_id, - host_user_id, - .. - } => { - if let Some(workspace) = self.workspace.upgrade() { - let app_state = workspace.read(cx).app_state().clone(); - workspace::join_in_room_project( - *project_id, - *host_user_id, - app_state, - cx, - ) + }, + ListEntry::Contact { contact, calling } => { + if contact.online && !contact.busy && !calling { + self.call(contact.user.id, window, cx); + } + } + ListEntry::ParticipantProject { + project_id, + host_user_id, + .. 
+ } => { + if let Some(workspace) = self.workspace.upgrade() { + let app_state = workspace.read(cx).app_state().clone(); + workspace::join_in_room_project(*project_id, *host_user_id, app_state, cx) .detach_and_prompt_err( "Failed to join project", window, cx, |_, _, _| None, ); - } - } - ListEntry::ParticipantScreen { peer_id, .. } => { - let Some(peer_id) = peer_id else { - return; - }; - if let Some(workspace) = self.workspace.upgrade() { - workspace.update(cx, |workspace, cx| { - workspace.open_shared_screen(*peer_id, window, cx) - }); - } } - ListEntry::Channel { channel, .. } => { - let is_active = maybe!({ - let call_channel = ActiveCall::global(cx) - .read(cx) - .room()? - .read(cx) - .channel_id()?; - - Some(call_channel == channel.id) - }) - .unwrap_or(false); - if is_active { - self.open_channel_notes(channel.id, window, cx) - } else { - self.join_channel(channel.id, window, cx) - } - } - ListEntry::ContactPlaceholder => self.toggle_contact_finder(window, cx), - ListEntry::CallParticipant { user, peer_id, .. } => { - if Some(user) == self.user_store.read(cx).current_user().as_ref() { - Self::leave_call(window, cx); - } else if let Some(peer_id) = peer_id { - self.workspace - .update(cx, |workspace, cx| workspace.follow(*peer_id, window, cx)) - .ok(); - } - } - ListEntry::IncomingRequest(user) => { - self.respond_to_contact_request(user.id, true, window, cx) - } - ListEntry::ChannelInvite(channel) => { - self.respond_to_channel_invite(channel.id, true, cx) + } + ListEntry::ParticipantScreen { peer_id, .. } => { + let Some(peer_id) = peer_id else { + return; + }; + if let Some(workspace) = self.workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + workspace.open_shared_screen(*peer_id, window, cx) + }); } - ListEntry::ChannelNotes { channel_id } => { - self.open_channel_notes(*channel_id, window, cx) + } + ListEntry::Channel { channel, .. } => { + let is_active = maybe!({ + let call_channel = ActiveCall::global(cx) + .read(cx) + .room()? + .read(cx) + .channel_id()?; + + Some(call_channel == channel.id) + }) + .unwrap_or(false); + if is_active { + self.open_channel_notes(channel.id, window, cx) + } else { + self.join_channel(channel.id, window, cx) } - ListEntry::ChannelChat { channel_id } => { - self.join_channel_chat(*channel_id, window, cx) + } + ListEntry::ContactPlaceholder => self.toggle_contact_finder(window, cx), + ListEntry::CallParticipant { user, peer_id, .. } => { + if Some(user) == self.user_store.read(cx).current_user().as_ref() { + Self::leave_call(window, cx); + } else if let Some(peer_id) = peer_id { + self.workspace + .update(cx, |workspace, cx| workspace.follow(*peer_id, window, cx)) + .ok(); } - ListEntry::OutgoingRequest(_) => {} - ListEntry::ChannelEditor { .. } => {} } + ListEntry::IncomingRequest(user) => { + self.respond_to_contact_request(user.id, true, window, cx) + } + ListEntry::ChannelInvite(channel) => { + self.respond_to_channel_invite(channel.id, true, cx) + } + ListEntry::ChannelNotes { channel_id } => { + self.open_channel_notes(*channel_id, window, cx) + } + ListEntry::ChannelChat { channel_id } => { + self.join_channel_chat(*channel_id, window, cx) + } + ListEntry::OutgoingRequest(_) => {} + ListEntry::ChannelEditor { .. } => {} } } } @@ -1828,10 +1821,10 @@ impl CollabPanel { } fn select_channel_editor(&mut self) { - self.selection = self.entries.iter().position(|entry| match entry { - ListEntry::ChannelEditor { .. 
} => true, - _ => false, - }); + self.selection = self + .entries + .iter() + .position(|entry| matches!(entry, ListEntry::ChannelEditor { .. })); } fn new_subchannel( @@ -2317,7 +2310,7 @@ impl CollabPanel { let client = this.client.clone(); cx.spawn_in(window, async move |_, cx| { client - .connect(true, &cx) + .connect(true, cx) .await .into_response() .notify_async_err(cx); @@ -2514,7 +2507,7 @@ impl CollabPanel { let button = match section { Section::ActiveCall => channel_link.map(|channel_link| { - let channel_link_copy = channel_link.clone(); + let channel_link_copy = channel_link; IconButton::new("channel-link", IconName::Copy) .icon_size(IconSize::Small) .size(ButtonSize::None) @@ -2698,7 +2691,7 @@ impl CollabPanel { h_flex() .w_full() .justify_between() - .child(Label::new(github_login.clone())) + .child(Label::new(github_login)) .child(h_flex().children(controls)), ) .start_slot(Avatar::new(user.avatar_uri.clone())) @@ -2931,7 +2924,7 @@ impl CollabPanel { .visible_on_hover(""), ) .child( - IconButton::new("channel_notes", IconName::FileText) + IconButton::new("channel_notes", IconName::Reader) .style(ButtonStyle::Filled) .shape(ui::IconButtonShape::Square) .icon_size(IconSize::Small) @@ -3132,7 +3125,7 @@ impl Panel for CollabPanel { impl Focusable for CollabPanel { fn focus_handle(&self, cx: &App) -> gpui::FocusHandle { - self.filter_editor.focus_handle(cx).clone() + self.filter_editor.focus_handle(cx) } } diff --git a/crates/collab_ui/src/collab_panel/channel_modal.rs b/crates/collab_ui/src/collab_panel/channel_modal.rs index c0d3130ee997e3fe2ffffc4b228de9e512f18340..e558835dbaf0e34e2efa1b4f64fd8f6cb96016c5 100644 --- a/crates/collab_ui/src/collab_panel/channel_modal.rs +++ b/crates/collab_ui/src/collab_panel/channel_modal.rs @@ -586,7 +586,7 @@ impl ChannelModalDelegate { return; }; let user_id = membership.user.id; - let picker = cx.entity().clone(); + let picker = cx.entity(); let context_menu = ContextMenu::build(window, cx, |mut menu, _window, _cx| { let role = membership.role; diff --git a/crates/collab_ui/src/notification_panel.rs b/crates/collab_ui/src/notification_panel.rs index 3a280ff6677c9a5f9598d5ecaf473af232a8fed1..bf6fc3b224c54cd1512011987f73623c58d33c32 100644 --- a/crates/collab_ui/src/notification_panel.rs +++ b/crates/collab_ui/src/notification_panel.rs @@ -121,13 +121,12 @@ impl NotificationPanel { let notification_list = ListState::new(0, ListAlignment::Top, px(1000.)); notification_list.set_scroll_handler(cx.listener( |this, event: &ListScrollEvent, _, cx| { - if event.count.saturating_sub(event.visible_range.end) < LOADING_THRESHOLD { - if let Some(task) = this + if event.count.saturating_sub(event.visible_range.end) < LOADING_THRESHOLD + && let Some(task) = this .notification_store .update(cx, |store, cx| store.load_more_notifications(false, cx)) - { - task.detach(); - } + { + task.detach(); } }, )); @@ -290,7 +289,7 @@ impl NotificationPanel { .gap_1() .size_full() .overflow_hidden() - .child(Label::new(text.clone())) + .child(Label::new(text)) .child( h_flex() .child( @@ -321,7 +320,7 @@ impl NotificationPanel { .justify_end() .child(Button::new("decline", "Decline").on_click({ let notification = notification.clone(); - let entity = cx.entity().clone(); + let entity = cx.entity(); move |_, _, cx| { entity.update(cx, |this, cx| { this.respond_to_notification( @@ -334,7 +333,7 @@ impl NotificationPanel { })) .child(Button::new("accept", "Accept").on_click({ let notification = notification.clone(); - let entity = cx.entity().clone(); + let entity = 
cx.entity(); move |_, _, cx| { entity.update(cx, |this, cx| { this.respond_to_notification( @@ -469,20 +468,19 @@ impl NotificationPanel { channel_id, .. } = notification.clone() + && let Some(workspace) = self.workspace.upgrade() { - if let Some(workspace) = self.workspace.upgrade() { - window.defer(cx, move |window, cx| { - workspace.update(cx, |workspace, cx| { - if let Some(panel) = workspace.focus_panel::(window, cx) { - panel.update(cx, |panel, cx| { - panel - .select_channel(ChannelId(channel_id), Some(message_id), cx) - .detach_and_log_err(cx); - }); - } - }); + window.defer(cx, move |window, cx| { + workspace.update(cx, |workspace, cx| { + if let Some(panel) = workspace.focus_panel::(window, cx) { + panel.update(cx, |panel, cx| { + panel + .select_channel(ChannelId(channel_id), Some(message_id), cx) + .detach_and_log_err(cx); + }); + } }); - } + }); } } @@ -491,18 +489,18 @@ impl NotificationPanel { return false; } - if let Notification::ChannelMessageMention { channel_id, .. } = ¬ification { - if let Some(workspace) = self.workspace.upgrade() { - return if let Some(panel) = workspace.read(cx).panel::(cx) { - let panel = panel.read(cx); - panel.is_scrolled_to_bottom() - && panel - .active_chat() - .map_or(false, |chat| chat.read(cx).channel_id.0 == *channel_id) - } else { - false - }; - } + if let Notification::ChannelMessageMention { channel_id, .. } = ¬ification + && let Some(workspace) = self.workspace.upgrade() + { + return if let Some(panel) = workspace.read(cx).panel::(cx) { + let panel = panel.read(cx); + panel.is_scrolled_to_bottom() + && panel + .active_chat() + .is_some_and(|chat| chat.read(cx).channel_id.0 == *channel_id) + } else { + false + }; } false @@ -582,16 +580,16 @@ impl NotificationPanel { } fn remove_toast(&mut self, notification_id: u64, cx: &mut Context) { - if let Some((current_id, _)) = &self.current_notification_toast { - if *current_id == notification_id { - self.current_notification_toast.take(); - self.workspace - .update(cx, |workspace, cx| { - let id = NotificationId::unique::(); - workspace.dismiss_notification(&id, cx) - }) - .ok(); - } + if let Some((current_id, _)) = &self.current_notification_toast + && *current_id == notification_id + { + self.current_notification_toast.take(); + self.workspace + .update(cx, |workspace, cx| { + let id = NotificationId::unique::(); + workspace.dismiss_notification(&id, cx) + }) + .ok(); } } @@ -643,7 +641,7 @@ impl Render for NotificationPanel { let client = client.clone(); window .spawn(cx, async move |cx| { - match client.connect(true, &cx).await { + match client.connect(true, cx).await { util::ConnectionResult::Timeout => { log::error!("Connection timeout"); } diff --git a/crates/command_palette/src/command_palette.rs b/crates/command_palette/src/command_palette.rs index b8800ff91284e6f105c029f7fffe9b4b83b6bcd1..227d246f04cecf8a5c58f2361d0b543ff678eac6 100644 --- a/crates/command_palette/src/command_palette.rs +++ b/crates/command_palette/src/command_palette.rs @@ -206,7 +206,7 @@ impl CommandPaletteDelegate { if parse_zed_link(&query, cx).is_some() { intercept_results = vec![CommandInterceptResult { action: OpenZedUrl { url: query.clone() }.boxed_clone(), - string: query.clone(), + string: query, positions: vec![], }] } diff --git a/crates/component/src/component_layout.rs b/crates/component/src/component_layout.rs index 58bf1d8f0c85533a4a06bd38c07f840c08cc6de3..a840d520a62b57516f20c190f2a5148505ccfed4 100644 --- a/crates/component/src/component_layout.rs +++ b/crates/component/src/component_layout.rs @@ 
-42,7 +42,7 @@ impl RenderOnce for ComponentExample { div() .text_size(rems(0.875)) .text_color(cx.theme().colors().text_muted) - .child(description.clone()), + .child(description), ) }), ) diff --git a/crates/context_server/src/client.rs b/crates/context_server/src/client.rs index 65283afa87d94fae3ec51f8a89574713080bded2..03cf047ac5273071354756119864ab6914e524c6 100644 --- a/crates/context_server/src/client.rs +++ b/crates/context_server/src/client.rs @@ -67,11 +67,7 @@ pub(crate) struct Client { pub(crate) struct ContextServerId(pub Arc); fn is_null_value(value: &T) -> bool { - if let Ok(Value::Null) = serde_json::to_value(value) { - true - } else { - false - } + matches!(serde_json::to_value(value), Ok(Value::Null)) } #[derive(Serialize, Deserialize)] @@ -161,7 +157,7 @@ impl Client { working_directory: &Option, cx: AsyncApp, ) -> Result { - log::info!( + log::debug!( "starting context server (executable={:?}, args={:?})", binary.executable, &binary.args @@ -271,10 +267,10 @@ impl Client { ); } } else if let Ok(response) = serde_json::from_str::(&message) { - if let Some(handlers) = response_handlers.lock().as_mut() { - if let Some(handler) = handlers.remove(&response.id) { - handler(Ok(message.to_string())); - } + if let Some(handlers) = response_handlers.lock().as_mut() + && let Some(handler) = handlers.remove(&response.id) + { + handler(Ok(message.to_string())); } } else if let Ok(notification) = serde_json::from_str::(&message) { let mut notification_handlers = notification_handlers.lock(); @@ -295,7 +291,7 @@ impl Client { /// Continuously reads and logs any error messages from the server. async fn handle_err(transport: Arc) -> anyhow::Result<()> { while let Some(err) = transport.receive_err().next().await { - log::warn!("context server stderr: {}", err.trim()); + log::debug!("context server stderr: {}", err.trim()); } Ok(()) diff --git a/crates/context_server/src/context_server.rs b/crates/context_server/src/context_server.rs index 34fa29678d5d68f864de7d9df3bef82d4c667f05..9ca78138dbc229a6aedd5c53c460d9205502df94 100644 --- a/crates/context_server/src/context_server.rs +++ b/crates/context_server/src/context_server.rs @@ -137,7 +137,7 @@ impl ContextServer { } async fn initialize(&self, client: Client) -> Result<()> { - log::info!("starting context server {}", self.id); + log::debug!("starting context server {}", self.id); let protocol = crate::protocol::ModelContextProtocol::new(client); let client_info = types::Implementation { name: "Zed".to_string(), diff --git a/crates/context_server/src/listener.rs b/crates/context_server/src/listener.rs index 0e85fb21292739ab0a92d0898fc449a31efe6f29..4e5da2566ee25ee70e1687cf5f0806e19789a824 100644 --- a/crates/context_server/src/listener.rs +++ b/crates/context_server/src/listener.rs @@ -14,6 +14,7 @@ use serde::de::DeserializeOwned; use serde_json::{json, value::RawValue}; use smol::stream::StreamExt; use std::{ + any::TypeId, cell::RefCell, path::{Path, PathBuf}, rc::Rc, @@ -77,7 +78,7 @@ impl McpServer { socket_path, _server_task: server_task, tools, - handlers: handlers, + handlers, }) }) } @@ -87,23 +88,30 @@ impl McpServer { settings.inline_subschemas = true; let mut generator = settings.into_generator(); - let output_schema = generator.root_schema_for::(); - let unit_schema = generator.root_schema_for::(); + let input_schema = generator.root_schema_for::(); + + let description = input_schema + .get("description") + .and_then(|desc| desc.as_str()) + .map(|desc| desc.to_string()); + debug_assert!( + description.is_some(), + "Input 
schema struct must include a doc comment for the tool description" + ); let registered_tool = RegisteredTool { tool: Tool { name: T::NAME.into(), - description: Some(tool.description().into()), - input_schema: generator.root_schema_for::().into(), - output_schema: if output_schema == unit_schema { + description, + input_schema: input_schema.into(), + output_schema: if TypeId::of::() == TypeId::of::<()>() { None } else { - Some(output_schema.into()) + Some(generator.root_schema_for::().into()) }, annotations: Some(tool.annotations()), }, handler: Box::new({ - let tool = tool.clone(); move |input_value, cx| { let input = match input_value { Some(input) => serde_json::from_value(input), @@ -315,12 +323,12 @@ impl McpServer { Self::send_err( request_id, format!("Tool not found: {}", params.name), - &outgoing_tx, + outgoing_tx, ); } } Err(err) => { - Self::send_err(request_id, err.to_string(), &outgoing_tx); + Self::send_err(request_id, err.to_string(), outgoing_tx); } } } @@ -399,8 +407,6 @@ pub trait McpServerTool { const NAME: &'static str; - fn description(&self) -> &'static str; - fn annotations(&self) -> ToolAnnotations { ToolAnnotations { title: None, @@ -418,6 +424,7 @@ pub trait McpServerTool { ) -> impl Future>>; } +#[derive(Debug)] pub struct ToolResponse { pub content: Vec, pub structured_content: T, diff --git a/crates/context_server/src/types.rs b/crates/context_server/src/types.rs index 5fa2420a3d40ce04ee97b4f88c1105711dea8793..03aca4f3caf7995091bbc8e049494b324674a9d3 100644 --- a/crates/context_server/src/types.rs +++ b/crates/context_server/src/types.rs @@ -691,7 +691,7 @@ impl CallToolResponse { let mut text = String::new(); for chunk in &self.content { if let ToolResponseContent::Text { text: chunk } = chunk { - text.push_str(&chunk) + text.push_str(chunk) }; } text @@ -711,6 +711,16 @@ pub enum ToolResponseContent { Resource { resource: ResourceContents }, } +impl ToolResponseContent { + pub fn text(&self) -> Option<&str> { + if let ToolResponseContent::Text { text } = self { + Some(text) + } else { + None + } + } +} + #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct ListToolsResponse { diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs index 49ae2b9d9c92c5deba00c54c51d48deb82d03dcc..b7d8423fd7d4d601250172a5789cbe83620849af 100644 --- a/crates/copilot/src/copilot.rs +++ b/crates/copilot/src/copilot.rs @@ -21,7 +21,7 @@ use language::{ point_from_lsp, point_to_lsp, }; use lsp::{LanguageServer, LanguageServerBinary, LanguageServerId, LanguageServerName}; -use node_runtime::NodeRuntime; +use node_runtime::{NodeRuntime, VersionStrategy}; use parking_lot::Mutex; use project::DisableAiSettings; use request::StatusNotification; @@ -81,10 +81,7 @@ pub fn init( }; copilot_chat::init(fs.clone(), http.clone(), configuration, cx); - let copilot = cx.new({ - let node_runtime = node_runtime.clone(); - move |cx| Copilot::start(new_server_id, fs, node_runtime, cx) - }); + let copilot = cx.new(move |cx| Copilot::start(new_server_id, fs, node_runtime, cx)); Copilot::set_global(copilot.clone(), cx); cx.observe(&copilot, |copilot, cx| { copilot.update(cx, |copilot, cx| copilot.update_action_visibilities(cx)); @@ -129,7 +126,7 @@ impl CopilotServer { fn as_authenticated(&mut self) -> Result<&mut RunningCopilotServer> { let server = self.as_running()?; anyhow::ensure!( - matches!(server.sign_in_status, SignInStatus::Authorized { .. 
}), + matches!(server.sign_in_status, SignInStatus::Authorized), "must sign in before using copilot" ); Ok(server) @@ -349,7 +346,11 @@ impl Copilot { this.start_copilot(true, false, cx); cx.observe_global::(move |this, cx| { this.start_copilot(true, false, cx); - this.send_configuration_update(cx); + if let Ok(server) = this.server.as_running() { + notify_did_change_config_to_server(&server.lsp, cx) + .context("copilot setting change: did change configuration") + .log_err(); + } }) .detach(); this @@ -438,43 +439,6 @@ impl Copilot { if env.is_empty() { None } else { Some(env) } } - fn send_configuration_update(&mut self, cx: &mut Context) { - let copilot_settings = all_language_settings(None, cx) - .edit_predictions - .copilot - .clone(); - - let settings = json!({ - "http": { - "proxy": copilot_settings.proxy, - "proxyStrictSSL": !copilot_settings.proxy_no_verify.unwrap_or(false) - }, - "github-enterprise": { - "uri": copilot_settings.enterprise_uri - } - }); - - if let Some(copilot_chat) = copilot_chat::CopilotChat::global(cx) { - copilot_chat.update(cx, |chat, cx| { - chat.set_configuration( - copilot_chat::CopilotChatConfiguration { - enterprise_uri: copilot_settings.enterprise_uri.clone(), - }, - cx, - ); - }); - } - - if let Ok(server) = self.server.as_running() { - server - .lsp - .notify::( - &lsp::DidChangeConfigurationParams { settings }, - ) - .log_err(); - } - } - #[cfg(any(test, feature = "test-support"))] pub fn fake(cx: &mut gpui::TestAppContext) -> (Entity, lsp::FakeLanguageServer) { use fs::FakeFs; @@ -573,6 +537,9 @@ impl Copilot { })? .await?; + this.update(cx, |_, cx| notify_did_change_config_to_server(&server, cx))? + .context("copilot: did change configuration")?; + let status = server .request::(request::CheckStatusParams { local_checks_only: false, @@ -598,8 +565,6 @@ impl Copilot { }); cx.emit(Event::CopilotLanguageServerStarted); this.update_sign_in_status(status, cx); - // Send configuration now that the LSP is fully started - this.send_configuration_update(cx); } Err(error) => { this.server = CopilotServer::Error(error.to_string().into()); @@ -613,12 +578,12 @@ impl Copilot { pub(crate) fn sign_in(&mut self, cx: &mut Context) -> Task> { if let CopilotServer::Running(server) = &mut self.server { let task = match &server.sign_in_status { - SignInStatus::Authorized { .. } => Task::ready(Ok(())).shared(), + SignInStatus::Authorized => Task::ready(Ok(())).shared(), SignInStatus::SigningIn { task, .. } => { cx.notify(); task.clone() } - SignInStatus::SignedOut { .. } | SignInStatus::Unauthorized { .. } => { + SignInStatus::SignedOut { .. } | SignInStatus::Unauthorized => { let lsp = server.lsp.clone(); let task = cx .spawn(async move |this, cx| { @@ -640,15 +605,13 @@ impl Copilot { sign_in_status: status, .. }) = &mut this.server - { - if let SignInStatus::SigningIn { + && let SignInStatus::SigningIn { prompt: prompt_flow, .. } = status - { - *prompt_flow = Some(flow.clone()); - cx.notify(); - } + { + *prompt_flow = Some(flow.clone()); + cx.notify(); } })?; let response = lsp @@ -764,7 +727,7 @@ impl Copilot { .. }) = &mut self.server { - if !matches!(status, SignInStatus::Authorized { .. 
}) { + if !matches!(status, SignInStatus::Authorized) { return; } @@ -814,59 +777,58 @@ impl Copilot { event: &language::BufferEvent, cx: &mut Context, ) -> Result<()> { - if let Ok(server) = self.server.as_running() { - if let Some(registered_buffer) = server.registered_buffers.get_mut(&buffer.entity_id()) - { - match event { - language::BufferEvent::Edited => { - drop(registered_buffer.report_changes(&buffer, cx)); - } - language::BufferEvent::Saved => { + if let Ok(server) = self.server.as_running() + && let Some(registered_buffer) = server.registered_buffers.get_mut(&buffer.entity_id()) + { + match event { + language::BufferEvent::Edited => { + drop(registered_buffer.report_changes(&buffer, cx)); + } + language::BufferEvent::Saved => { + server + .lsp + .notify::( + &lsp::DidSaveTextDocumentParams { + text_document: lsp::TextDocumentIdentifier::new( + registered_buffer.uri.clone(), + ), + text: None, + }, + )?; + } + language::BufferEvent::FileHandleChanged + | language::BufferEvent::LanguageChanged => { + let new_language_id = id_for_language(buffer.read(cx).language()); + let Ok(new_uri) = uri_for_buffer(&buffer, cx) else { + return Ok(()); + }; + if new_uri != registered_buffer.uri + || new_language_id != registered_buffer.language_id + { + let old_uri = mem::replace(&mut registered_buffer.uri, new_uri); + registered_buffer.language_id = new_language_id; server .lsp - .notify::( - &lsp::DidSaveTextDocumentParams { - text_document: lsp::TextDocumentIdentifier::new( + .notify::( + &lsp::DidCloseTextDocumentParams { + text_document: lsp::TextDocumentIdentifier::new(old_uri), + }, + )?; + server + .lsp + .notify::( + &lsp::DidOpenTextDocumentParams { + text_document: lsp::TextDocumentItem::new( registered_buffer.uri.clone(), + registered_buffer.language_id.clone(), + registered_buffer.snapshot_version, + registered_buffer.snapshot.text(), ), - text: None, }, )?; } - language::BufferEvent::FileHandleChanged - | language::BufferEvent::LanguageChanged => { - let new_language_id = id_for_language(buffer.read(cx).language()); - let Ok(new_uri) = uri_for_buffer(&buffer, cx) else { - return Ok(()); - }; - if new_uri != registered_buffer.uri - || new_language_id != registered_buffer.language_id - { - let old_uri = mem::replace(&mut registered_buffer.uri, new_uri); - registered_buffer.language_id = new_language_id; - server - .lsp - .notify::( - &lsp::DidCloseTextDocumentParams { - text_document: lsp::TextDocumentIdentifier::new(old_uri), - }, - )?; - server - .lsp - .notify::( - &lsp::DidOpenTextDocumentParams { - text_document: lsp::TextDocumentItem::new( - registered_buffer.uri.clone(), - registered_buffer.language_id.clone(), - registered_buffer.snapshot_version, - registered_buffer.snapshot.text(), - ), - }, - )?; - } - } - _ => {} } + _ => {} } } @@ -874,17 +836,17 @@ impl Copilot { } fn unregister_buffer(&mut self, buffer: &WeakEntity) { - if let Ok(server) = self.server.as_running() { - if let Some(buffer) = server.registered_buffers.remove(&buffer.entity_id()) { - server - .lsp - .notify::( - &lsp::DidCloseTextDocumentParams { - text_document: lsp::TextDocumentIdentifier::new(buffer.uri), - }, - ) - .ok(); - } + if let Ok(server) = self.server.as_running() + && let Some(buffer) = server.registered_buffers.remove(&buffer.entity_id()) + { + server + .lsp + .notify::( + &lsp::DidCloseTextDocumentParams { + text_document: lsp::TextDocumentIdentifier::new(buffer.uri), + }, + ) + .ok(); } } @@ -1047,8 +1009,8 @@ impl Copilot { CopilotServer::Error(error) => Status::Error(error.clone()), 
CopilotServer::Running(RunningCopilotServer { sign_in_status, .. }) => { match sign_in_status { - SignInStatus::Authorized { .. } => Status::Authorized, - SignInStatus::Unauthorized { .. } => Status::Unauthorized, + SignInStatus::Authorized => Status::Authorized, + SignInStatus::Unauthorized => Status::Unauthorized, SignInStatus::SigningIn { prompt, .. } => Status::SigningIn { prompt: prompt.clone(), }, @@ -1156,6 +1118,41 @@ fn uri_for_buffer(buffer: &Entity, cx: &App) -> Result { } } +fn notify_did_change_config_to_server( + server: &Arc, + cx: &mut Context, +) -> std::result::Result<(), anyhow::Error> { + let copilot_settings = all_language_settings(None, cx) + .edit_predictions + .copilot + .clone(); + + if let Some(copilot_chat) = copilot_chat::CopilotChat::global(cx) { + copilot_chat.update(cx, |chat, cx| { + chat.set_configuration( + copilot_chat::CopilotChatConfiguration { + enterprise_uri: copilot_settings.enterprise_uri.clone(), + }, + cx, + ); + }); + } + + let settings = json!({ + "http": { + "proxy": copilot_settings.proxy, + "proxyStrictSSL": !copilot_settings.proxy_no_verify.unwrap_or(false) + }, + "github-enterprise": { + "uri": copilot_settings.enterprise_uri + } + }); + + server.notify::(&lsp::DidChangeConfigurationParams { + settings, + }) +} + async fn clear_copilot_dir() { remove_matching(paths::copilot_dir(), |_| true).await } @@ -1181,7 +1178,7 @@ async fn get_copilot_lsp(fs: Arc, node_runtime: NodeRuntime) -> anyhow:: PACKAGE_NAME, &server_path, paths::copilot_dir(), - &latest_version, + VersionStrategy::Latest(&latest_version), ) .await; if should_install { diff --git a/crates/copilot/src/copilot_chat.rs b/crates/copilot/src/copilot_chat.rs index 4c91b4fedb790ab3500273ff21aba767cacd28e0..e8e2251648e4b941fe616b4524337fe565513950 100644 --- a/crates/copilot/src/copilot_chat.rs +++ b/crates/copilot/src/copilot_chat.rs @@ -484,7 +484,7 @@ impl CopilotChat { }; if this.oauth_token.is_some() { - cx.spawn(async move |this, mut cx| Self::update_models(&this, &mut cx).await) + cx.spawn(async move |this, cx| Self::update_models(&this, cx).await) .detach_and_log_err(cx); } @@ -863,7 +863,7 @@ mod tests { "object": "list" }"#; - let schema: ModelSchema = serde_json::from_str(&json).unwrap(); + let schema: ModelSchema = serde_json::from_str(json).unwrap(); assert_eq!(schema.data.len(), 2); assert_eq!(schema.data[0].id, "gpt-4"); diff --git a/crates/copilot/src/copilot_completion_provider.rs b/crates/copilot/src/copilot_completion_provider.rs index 2fd6df27b9e15d4247d85edca4d8836c35b23df1..9308500ed49f8f619f87aba585ee5e6b00a350ae 100644 --- a/crates/copilot/src/copilot_completion_provider.rs +++ b/crates/copilot/src/copilot_completion_provider.rs @@ -1083,7 +1083,7 @@ mod tests { let replace_range_marker: TextRangeMarker = ('<', '>').into(); let (_, mut marked_ranges) = marked_text_ranges_by( marked_string, - vec![complete_from_marker.clone(), replace_range_marker.clone()], + vec![complete_from_marker, replace_range_marker.clone()], ); let replace_range = diff --git a/crates/crashes/Cargo.toml b/crates/crashes/Cargo.toml index 641a97765a70f75edbfe478d3a493322b9c443eb..f12913d1cbda34219430d8aba8e56431a056da59 100644 --- a/crates/crashes/Cargo.toml +++ b/crates/crashes/Cargo.toml @@ -10,9 +10,15 @@ crash-handler.workspace = true log.workspace = true minidumper.workspace = true paths.workspace = true +release_channel.workspace = true smol.workspace = true +serde.workspace = true +serde_json.workspace = true workspace-hack.workspace = true +[target.'cfg(target_os = 
"macos")'.dependencies] +mach2.workspace = true + [lints] workspace = true diff --git a/crates/crashes/src/crashes.rs b/crates/crashes/src/crashes.rs index cfb4b57d5dfdf9303fa799fbe0a4d200657612c0..b1afc5ae454d30c9c8ee283fce5e03b942fb7c70 100644 --- a/crates/crashes/src/crashes.rs +++ b/crates/crashes/src/crashes.rs @@ -1,15 +1,20 @@ use crash_handler::CrashHandler; use log::info; use minidumper::{Client, LoopAction, MinidumpBinary}; +use release_channel::{RELEASE_CHANNEL, ReleaseChannel}; +use serde::{Deserialize, Serialize}; +#[cfg(target_os = "macos")] +use std::sync::atomic::AtomicU32; use std::{ env, - fs::File, + fs::{self, File}, io, + panic::Location, path::{Path, PathBuf}, process::{self, Command}, sync::{ - OnceLock, + Arc, OnceLock, atomic::{AtomicBool, Ordering}, }, thread, @@ -17,12 +22,20 @@ use std::{ }; // set once the crash handler has initialized and the client has connected to it -pub static CRASH_HANDLER: AtomicBool = AtomicBool::new(false); +pub static CRASH_HANDLER: OnceLock> = OnceLock::new(); // set when the first minidump request is made to avoid generating duplicate crash reports pub static REQUESTED_MINIDUMP: AtomicBool = AtomicBool::new(false); -const CRASH_HANDLER_TIMEOUT: Duration = Duration::from_secs(60); +const CRASH_HANDLER_PING_TIMEOUT: Duration = Duration::from_secs(60); +const CRASH_HANDLER_CONNECT_TIMEOUT: Duration = Duration::from_secs(10); + +#[cfg(target_os = "macos")] +static PANIC_THREAD_ID: AtomicU32 = AtomicU32::new(0); + +pub async fn init(crash_init: InitCrashHandler) { + if *RELEASE_CHANNEL == ReleaseChannel::Dev && env::var("ZED_GENERATE_MINIDUMPS").is_err() { + return; + } -pub async fn init(id: String) { let exe = env::current_exe().expect("unable to find ourselves"); let zed_pid = process::id(); // TODO: we should be able to get away with using 1 crash-handler process per machine, @@ -53,9 +66,11 @@ pub async fn init(id: String) { smol::Timer::after(retry_frequency).await; } let client = maybe_client.unwrap(); - client.send_message(1, id).unwrap(); // set session id on the server + client + .send_message(1, serde_json::to_vec(&crash_init).unwrap()) + .unwrap(); - let client = std::sync::Arc::new(client); + let client = Arc::new(client); let handler = crash_handler::CrashHandler::attach(unsafe { let client = client.clone(); crash_handler::make_crash_event(move |crash_context: &crash_handler::CrashContext| { @@ -64,7 +79,9 @@ pub async fn init(id: String) { .compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed) .is_ok() { - client.send_message(2, "mistakes were made").unwrap(); + #[cfg(target_os = "macos")] + suspend_all_other_threads(); + client.ping().unwrap(); client.request_dump(crash_context).is_ok() } else { @@ -79,7 +96,7 @@ pub async fn init(id: String) { { handler.set_ptracer(Some(server_pid)); } - CRASH_HANDLER.store(true, Ordering::Release); + CRASH_HANDLER.set(client.clone()).ok(); std::mem::forget(handler); info!("crash handler registered"); @@ -89,61 +106,151 @@ pub async fn init(id: String) { } } +#[cfg(target_os = "macos")] +unsafe fn suspend_all_other_threads() { + let task = unsafe { mach2::traps::current_task() }; + let mut threads: mach2::mach_types::thread_act_array_t = std::ptr::null_mut(); + let mut count = 0; + unsafe { + mach2::task::task_threads(task, &raw mut threads, &raw mut count); + } + let current = unsafe { mach2::mach_init::mach_thread_self() }; + let panic_thread = PANIC_THREAD_ID.load(Ordering::SeqCst); + for i in 0..count { + let t = unsafe { *threads.add(i as usize) }; + if t != current && 
t != panic_thread { + unsafe { mach2::thread_act::thread_suspend(t) }; + } + } +} + pub struct CrashServer { - session_id: OnceLock, + initialization_params: OnceLock, + panic_info: OnceLock, + has_connection: Arc, +} + +#[derive(Debug, Deserialize, Serialize, Clone)] +pub struct CrashInfo { + pub init: InitCrashHandler, + pub panic: Option, + pub minidump_error: Option, +} + +#[derive(Debug, Deserialize, Serialize, Clone)] +pub struct InitCrashHandler { + pub session_id: String, + pub zed_version: String, + pub release_channel: String, + pub commit_sha: String, + // pub gpu: String, +} + +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct CrashPanic { + pub message: String, + pub span: String, } impl minidumper::ServerHandler for CrashServer { fn create_minidump_file(&self) -> Result<(File, PathBuf), io::Error> { - let err_message = "Need to send a message with the ID upon starting the crash handler"; + let err_message = "Missing initialization data"; let dump_path = paths::logs_dir() - .join(self.session_id.get().expect(err_message)) + .join( + &self + .initialization_params + .get() + .expect(err_message) + .session_id, + ) .with_extension("dmp"); let file = File::create(&dump_path)?; Ok((file, dump_path)) } fn on_minidump_created(&self, result: Result) -> LoopAction { - match result { + let minidump_error = match result { Ok(mut md_bin) => { use io::Write; let _ = md_bin.file.flush(); - info!("wrote minidump to disk {:?}", md_bin.path); - } - Err(e) => { - info!("failed to write minidump: {:#}", e); + None } - } + Err(e) => Some(format!("{e:?}")), + }; + + let crash_info = CrashInfo { + init: self + .initialization_params + .get() + .expect("not initialized") + .clone(), + panic: self.panic_info.get().cloned(), + minidump_error, + }; + + let crash_data_path = paths::logs_dir() + .join(&crash_info.init.session_id) + .with_extension("json"); + + fs::write(crash_data_path, serde_json::to_vec(&crash_info).unwrap()).ok(); + LoopAction::Exit } fn on_message(&self, kind: u32, buffer: Vec) { - let message = String::from_utf8(buffer).expect("invalid utf-8"); - info!("kind: {kind}, message: {message}",); - if kind == 1 { - self.session_id - .set(message) - .expect("session id already initialized"); + match kind { + 1 => { + let init_data = + serde_json::from_slice::(&buffer).expect("invalid init data"); + self.initialization_params + .set(init_data) + .expect("already initialized"); + } + 2 => { + let panic_data = + serde_json::from_slice::(&buffer).expect("invalid panic data"); + self.panic_info.set(panic_data).expect("already panicked"); + } + _ => { + panic!("invalid message kind"); + } } } - fn on_client_disconnected(&self, clients: usize) -> LoopAction { - info!("client disconnected, {clients} remaining"); - if clients == 0 { - LoopAction::Exit - } else { - LoopAction::Continue - } + fn on_client_disconnected(&self, _clients: usize) -> LoopAction { + LoopAction::Exit + } + + fn on_client_connected(&self, _clients: usize) -> LoopAction { + self.has_connection.store(true, Ordering::SeqCst); + LoopAction::Continue } } -pub fn handle_panic() { +pub fn handle_panic(message: String, span: Option<&Location>) { + let span = span + .map(|loc| format!("{}:{}", loc.file(), loc.line())) + .unwrap_or_default(); + // wait 500ms for the crash handler process to start up // if it's still not there just write panic info and no minidump let retry_frequency = Duration::from_millis(100); for _ in 0..5 { - if CRASH_HANDLER.load(Ordering::Acquire) { + if let Some(client) = CRASH_HANDLER.get() { + client 
+ .send_message( + 2, + serde_json::to_vec(&CrashPanic { message, span }).unwrap(), + ) + .ok(); log::error!("triggering a crash to generate a minidump..."); + + #[cfg(target_os = "macos")] + PANIC_THREAD_ID.store( + unsafe { mach2::mach_init::mach_thread_self() }, + Ordering::SeqCst, + ); + #[cfg(target_os = "linux")] CrashHandler.simulate_signal(crash_handler::Signal::Trap as u32); #[cfg(not(target_os = "linux"))] @@ -159,14 +266,30 @@ pub fn crash_server(socket: &Path) { log::info!("Couldn't create socket, there may already be a running crash server"); return; }; - let ab = AtomicBool::new(false); + + let shutdown = Arc::new(AtomicBool::new(false)); + let has_connection = Arc::new(AtomicBool::new(false)); + + std::thread::spawn({ + let shutdown = shutdown.clone(); + let has_connection = has_connection.clone(); + move || { + std::thread::sleep(CRASH_HANDLER_CONNECT_TIMEOUT); + if !has_connection.load(Ordering::SeqCst) { + shutdown.store(true, Ordering::SeqCst); + } + } + }); + server .run( Box::new(CrashServer { - session_id: OnceLock::new(), + initialization_params: OnceLock::new(), + panic_info: OnceLock::new(), + has_connection, }), - &ab, - Some(CRASH_HANDLER_TIMEOUT), + &shutdown, + Some(CRASH_HANDLER_PING_TIMEOUT), ) .expect("failed to run server"); } diff --git a/crates/credentials_provider/src/credentials_provider.rs b/crates/credentials_provider/src/credentials_provider.rs index f72fd6c39b12d5d46cfa1d4f3f30900f01471e64..2c8dd6fc812aaeffd6c06c88ee2adceabdbb27a3 100644 --- a/crates/credentials_provider/src/credentials_provider.rs +++ b/crates/credentials_provider/src/credentials_provider.rs @@ -19,7 +19,7 @@ use release_channel::ReleaseChannel; /// Only works in development. Setting this environment variable in other /// release channels is a no-op. static ZED_DEVELOPMENT_USE_KEYCHAIN: LazyLock = LazyLock::new(|| { - std::env::var("ZED_DEVELOPMENT_USE_KEYCHAIN").map_or(false, |value| !value.is_empty()) + std::env::var("ZED_DEVELOPMENT_USE_KEYCHAIN").is_ok_and(|value| !value.is_empty()) }); /// A provider for credentials. 
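Aside: the hunks above and below repeatedly apply the same two mechanical cleanups — `Option::map_or(false, …)` / `Result::map_or(false, …)` rewritten as `is_some_and` / `is_ok_and` (as in the ZED_DEVELOPMENT_USE_KEYCHAIN, ZED_STATELESS, and Dismissable changes), and single-pattern boolean `if let … { true } else { false }` blocks rewritten as `matches!` (as in `is_null_value` and the `SignInStatus::Authorized` checks). The following is a minimal, self-contained sketch of the equivalence, not code from this PR; the names `flag_is_set`, `Status`, and `is_authorized` are illustrative only.

    use std::env;

    // `map_or(false, f)` on Option/Result becomes `is_some_and(f)` / `is_ok_and(f)`:
    // same truth table, no default value to read past.
    fn flag_is_set(name: &str) -> bool {
        // Before: env::var(name).map_or(false, |v| !v.is_empty())
        env::var(name).is_ok_and(|v| !v.is_empty())
    }

    // A boolean `if let` over a single pattern becomes `matches!`.
    enum Status {
        Authorized,
        SignedOut,
    }

    fn is_authorized(status: &Status) -> bool {
        // Before: if let Status::Authorized = status { true } else { false }
        matches!(status, Status::Authorized)
    }

    fn main() {
        assert!(!flag_is_set("HYPOTHETICAL_UNSET_FLAG"));
        assert!(is_authorized(&Status::Authorized));
        assert!(!is_authorized(&Status::SignedOut));
    }

Both rewrites are behavior-preserving; they only drop the redundant `false` default or the explicit `true`/`false` arms.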
diff --git a/crates/dap/src/adapters.rs b/crates/dap/src/adapters.rs index 687305ae94da3bc1ddd72e9e9f4594f4f4a19ee4..2cef266677c1314f6fd253b9caf77914050ceb96 100644 --- a/crates/dap/src/adapters.rs +++ b/crates/dap/src/adapters.rs @@ -285,7 +285,7 @@ pub async fn download_adapter_from_github( } if !adapter_path.exists() { - fs.create_dir(&adapter_path.as_path()) + fs.create_dir(adapter_path.as_path()) .await .context("Failed creating adapter path")?; } diff --git a/crates/dap/src/client.rs b/crates/dap/src/client.rs index 7b791450ecba3b09b6571ac84fbebdf92fff57b8..2590bf5c8b0db8e70a7897b8de4bc878187e4daa 100644 --- a/crates/dap/src/client.rs +++ b/crates/dap/src/client.rs @@ -23,7 +23,7 @@ impl SessionId { Self(client_id as u32) } - pub fn to_proto(&self) -> u64 { + pub fn to_proto(self) -> u64 { self.0 as u64 } } diff --git a/crates/dap_adapters/src/codelldb.rs b/crates/dap_adapters/src/codelldb.rs index 5b88db4432d3823e8a85228a82ca064cfacad23c..25dc875740e8aba87872e8dc93fa8d77062ed545 100644 --- a/crates/dap_adapters/src/codelldb.rs +++ b/crates/dap_adapters/src/codelldb.rs @@ -338,8 +338,8 @@ impl DebugAdapter for CodeLldbDebugAdapter { if command.is_none() { delegate.output_to_console(format!("Checking latest version of {}...", self.name())); let adapter_path = paths::debug_adapters_dir().join(&Self::ADAPTER_NAME); - let version_path = - if let Ok(version) = self.fetch_latest_adapter_version(delegate).await { + let version_path = match self.fetch_latest_adapter_version(delegate).await { + Ok(version) => { adapters::download_adapter_from_github( self.name(), version.clone(), @@ -351,10 +351,26 @@ impl DebugAdapter for CodeLldbDebugAdapter { adapter_path.join(format!("{}_{}", Self::ADAPTER_NAME, version.tag_name)); remove_matching(&adapter_path, |entry| entry != version_path).await; version_path - } else { - let mut paths = delegate.fs().read_dir(&adapter_path).await?; - paths.next().await.context("No adapter found")?? - }; + } + Err(e) => { + delegate.output_to_console("Unable to fetch latest version".to_string()); + log::error!("Error fetching latest version of {}: {}", self.name(), e); + delegate.output_to_console(format!( + "Searching for adapters in: {}", + adapter_path.display() + )); + let mut paths = delegate + .fs() + .read_dir(&adapter_path) + .await + .context("No cached adapter directory")?; + paths + .next() + .await + .context("No cached adapter found")? + .context("No cached adapter found")? 
+ } + }; let adapter_dir = version_path.join("extension").join("adapter"); let path = adapter_dir.join("codelldb").to_string_lossy().to_string(); self.path_to_codelldb.set(path.clone()).ok(); @@ -369,7 +385,7 @@ impl DebugAdapter for CodeLldbDebugAdapter { && let Some(source_languages) = config.get("sourceLanguages").filter(|value| { value .as_array() - .map_or(false, |array| array.iter().all(Value::is_string)) + .is_some_and(|array| array.iter().all(Value::is_string)) }) { let ret = vec![ diff --git a/crates/dap_adapters/src/go.rs b/crates/dap_adapters/src/go.rs index 22d8262b93e36b17e548ae4dcc9bb725da8ca7cb..db8a45ceb49963eab053f3e7728d4cf715e9a40b 100644 --- a/crates/dap_adapters/src/go.rs +++ b/crates/dap_adapters/src/go.rs @@ -36,7 +36,7 @@ impl GoDebugAdapter { delegate: &Arc, ) -> Result { let release = latest_github_release( - &"zed-industries/delve-shim-dap", + "zed-industries/delve-shim-dap", true, false, delegate.http_client(), diff --git a/crates/dap_adapters/src/javascript.rs b/crates/dap_adapters/src/javascript.rs index 2d19921a0f0c979fe53ede5860ac0c4d26b510c3..a8826d563b09925068dd6da1be865f1e17bce0ec 100644 --- a/crates/dap_adapters/src/javascript.rs +++ b/crates/dap_adapters/src/javascript.rs @@ -99,10 +99,10 @@ impl JsDebugAdapter { } } - if let Some(env) = configuration.get("env").cloned() { - if let Ok(env) = serde_json::from_value(env) { - envs = env; - } + if let Some(env) = configuration.get("env").cloned() + && let Ok(env) = serde_json::from_value(env) + { + envs = env; } configuration @@ -514,7 +514,7 @@ impl DebugAdapter for JsDebugAdapter { } } - self.get_installed_binary(delegate, &config, user_installed_path, user_args, cx) + self.get_installed_binary(delegate, config, user_installed_path, user_args, cx) .await } diff --git a/crates/dap_adapters/src/python.rs b/crates/dap_adapters/src/python.rs index 461ce6fbb3176508d74524f8159eb6a0cc448932..614cd0e05d1821539c74eb4e78321fb1e0c29445 100644 --- a/crates/dap_adapters/src/python.rs +++ b/crates/dap_adapters/src/python.rs @@ -24,6 +24,7 @@ use util::{ResultExt, maybe}; #[derive(Default)] pub(crate) struct PythonDebugAdapter { + base_venv_path: OnceCell, String>>, debugpy_whl_base_path: OnceCell, String>>, } @@ -91,14 +92,12 @@ impl PythonDebugAdapter { }) } - async fn fetch_wheel(delegate: &Arc) -> Result, String> { - let system_python = Self::system_python_name(delegate) - .await - .ok_or_else(|| String::from("Could not find a Python installation"))?; - let command: &OsStr = system_python.as_ref(); + async fn fetch_wheel(&self, delegate: &Arc) -> Result, String> { let download_dir = debug_adapters_dir().join(Self::ADAPTER_NAME).join("wheels"); std::fs::create_dir_all(&download_dir).map_err(|e| e.to_string())?; - let installation_succeeded = util::command::new_smol_command(command) + let system_python = self.base_venv_path(delegate).await?; + + let installation_succeeded = util::command::new_smol_command(system_python.as_ref()) .args([ "-m", "pip", @@ -114,7 +113,7 @@ impl PythonDebugAdapter { .status .success(); if !installation_succeeded { - return Err("debugpy installation failed".into()); + return Err("debugpy installation failed (could not fetch Debugpy's wheel)".into()); } let wheel_path = std::fs::read_dir(&download_dir) @@ -139,7 +138,7 @@ impl PythonDebugAdapter { Ok(Arc::from(wheel_path.path())) } - async fn maybe_fetch_new_wheel(delegate: &Arc) { + async fn maybe_fetch_new_wheel(&self, delegate: &Arc) { let latest_release = delegate .http_client() .get( @@ -152,6 +151,9 @@ impl PythonDebugAdapter { 
maybe!(async move { let response = latest_release.filter(|response| response.status().is_success())?; + let download_dir = debug_adapters_dir().join(Self::ADAPTER_NAME); + std::fs::create_dir_all(&download_dir).ok()?; + let mut output = String::new(); response .into_body() @@ -188,7 +190,7 @@ impl PythonDebugAdapter { ) .await .ok()?; - Self::fetch_wheel(delegate).await.ok()?; + self.fetch_wheel(delegate).await.ok()?; } Some(()) }) @@ -201,7 +203,7 @@ impl PythonDebugAdapter { ) -> Result, String> { self.debugpy_whl_base_path .get_or_init(|| async move { - Self::maybe_fetch_new_wheel(delegate).await; + self.maybe_fetch_new_wheel(delegate).await; Ok(Arc::from( debug_adapters_dir() .join(Self::ADAPTER_NAME) @@ -214,6 +216,45 @@ impl PythonDebugAdapter { .clone() } + async fn base_venv_path(&self, delegate: &Arc) -> Result, String> { + self.base_venv_path + .get_or_init(|| async { + let base_python = Self::system_python_name(delegate) + .await + .ok_or_else(|| String::from("Could not find a Python installation"))?; + + let did_succeed = util::command::new_smol_command(base_python) + .args(["-m", "venv", "zed_base_venv"]) + .current_dir( + paths::debug_adapters_dir().join(Self::DEBUG_ADAPTER_NAME.as_ref()), + ) + .spawn() + .map_err(|e| format!("{e:#?}"))? + .status() + .await + .map_err(|e| format!("{e:#?}"))? + .success(); + if !did_succeed { + return Err("Failed to create base virtual environment".into()); + } + + const DIR: &str = if cfg!(target_os = "windows") { + "Scripts" + } else { + "bin" + }; + Ok(Arc::from( + paths::debug_adapters_dir() + .join(Self::DEBUG_ADAPTER_NAME.as_ref()) + .join("zed_base_venv") + .join(DIR) + .join("python3") + .as_ref(), + )) + }) + .await + .clone() + } async fn system_python_name(delegate: &Arc) -> Option { const BINARY_NAMES: [&str; 3] = ["python3", "python", "py"]; let mut name = None; @@ -676,7 +717,7 @@ impl DebugAdapter for PythonDebugAdapter { local_path.display() ); return self - .get_installed_binary(delegate, &config, Some(local_path.clone()), user_args, None) + .get_installed_binary(delegate, config, Some(local_path.clone()), user_args, None) .await; } @@ -713,7 +754,7 @@ impl DebugAdapter for PythonDebugAdapter { return self .get_installed_binary( delegate, - &config, + config, None, user_args, Some(toolchain.path.to_string()), @@ -721,7 +762,7 @@ impl DebugAdapter for PythonDebugAdapter { .await; } - self.get_installed_binary(delegate, &config, None, user_args, None) + self.get_installed_binary(delegate, config, None, user_args, None) .await } diff --git a/crates/db/src/db.rs b/crates/db/src/db.rs index de55212cbadfdd3c66ede66b706a3d120dd765c5..8b790cbec8498c1c3f83d55b25c14042b04b9424 100644 --- a/crates/db/src/db.rs +++ b/crates/db/src/db.rs @@ -37,7 +37,7 @@ const FALLBACK_DB_NAME: &str = "FALLBACK_MEMORY_DB"; const DB_FILE_NAME: &str = "db.sqlite"; pub static ZED_STATELESS: LazyLock = - LazyLock::new(|| env::var("ZED_STATELESS").map_or(false, |v| !v.is_empty())); + LazyLock::new(|| env::var("ZED_STATELESS").is_ok_and(|v| !v.is_empty())); pub static ALL_FILE_DB_FAILED: LazyLock = LazyLock::new(|| AtomicBool::new(false)); @@ -74,7 +74,7 @@ pub async fn open_db(db_dir: &Path, scope: &str) -> Threa } async fn open_main_db(db_path: &Path) -> Option { - log::info!("Opening database {}", db_path.display()); + log::trace!("Opening database {}", db_path.display()); ThreadSafeConnection::builder::(db_path.to_string_lossy().as_ref(), true) .with_db_initialization_query(DB_INITIALIZE_QUERY) 
.with_connection_initialize_query(CONNECTION_INITIALIZE_QUERY) @@ -238,7 +238,7 @@ mod tests { .unwrap(); let _bad_db = open_db::( tempdir.path(), - &release_channel::ReleaseChannel::Dev.dev_name(), + release_channel::ReleaseChannel::Dev.dev_name(), ) .await; } @@ -279,7 +279,7 @@ mod tests { { let corrupt_db = open_db::( tempdir.path(), - &release_channel::ReleaseChannel::Dev.dev_name(), + release_channel::ReleaseChannel::Dev.dev_name(), ) .await; assert!(corrupt_db.persistent()); @@ -287,7 +287,7 @@ mod tests { let good_db = open_db::( tempdir.path(), - &release_channel::ReleaseChannel::Dev.dev_name(), + release_channel::ReleaseChannel::Dev.dev_name(), ) .await; assert!( @@ -334,7 +334,7 @@ mod tests { // Setup the bad database let corrupt_db = open_db::( tempdir.path(), - &release_channel::ReleaseChannel::Dev.dev_name(), + release_channel::ReleaseChannel::Dev.dev_name(), ) .await; assert!(corrupt_db.persistent()); @@ -347,7 +347,7 @@ mod tests { let guard = thread::spawn(move || { let good_db = smol::block_on(open_db::( tmp_path.as_path(), - &release_channel::ReleaseChannel::Dev.dev_name(), + release_channel::ReleaseChannel::Dev.dev_name(), )); assert!( good_db.select_row::("SELECT * FROM test2").unwrap()() diff --git a/crates/db/src/kvp.rs b/crates/db/src/kvp.rs index daf0b136fde5bd62411c70033e8bcfcb668a5e06..256b789c9b2f2909ec5b12f6dc9dd60c04555e51 100644 --- a/crates/db/src/kvp.rs +++ b/crates/db/src/kvp.rs @@ -20,7 +20,7 @@ pub trait Dismissable { KEY_VALUE_STORE .read_kvp(Self::KEY) .log_err() - .map_or(false, |s| s.is_some()) + .is_some_and(|s| s.is_some()) } fn set_dismissed(is_dismissed: bool, cx: &mut App) { diff --git a/crates/debugger_tools/src/dap_log.rs b/crates/debugger_tools/src/dap_log.rs index b806381d251c6595a5dd12022dc3d1df8b71739f..c4338c6d0017a215c721c772871647c89227775e 100644 --- a/crates/debugger_tools/src/dap_log.rs +++ b/crates/debugger_tools/src/dap_log.rs @@ -392,7 +392,7 @@ impl LogStore { session.label(), session .adapter_client() - .map_or(false, |client| client.has_adapter_logs()), + .is_some_and(|client| client.has_adapter_logs()), ) }); @@ -485,7 +485,7 @@ impl LogStore { &mut self, id: &LogStoreEntryIdentifier<'_>, ) -> Option<&Vec> { - self.get_debug_adapter_state(&id) + self.get_debug_adapter_state(id) .map(|state| &state.rpc_messages.initialization_sequence) } } @@ -536,11 +536,11 @@ impl Render for DapLogToolbarItemView { }) .unwrap_or_else(|| "No adapter selected".into()), )) - .menu(move |mut window, cx| { + .menu(move |window, cx| { let log_view = log_view.clone(); let menu_rows = menu_rows.clone(); let project = project.clone(); - ContextMenu::build(&mut window, cx, move |mut menu, window, _cx| { + ContextMenu::build(window, cx, move |mut menu, window, _cx| { for row in menu_rows.into_iter() { menu = menu.custom_row(move |_window, _cx| { div() @@ -661,11 +661,11 @@ impl ToolbarItemView for DapLogToolbarItemView { _window: &mut Window, cx: &mut Context, ) -> workspace::ToolbarItemLocation { - if let Some(item) = active_pane_item { - if let Some(log_view) = item.downcast::() { - self.log_view = Some(log_view.clone()); - return workspace::ToolbarItemLocation::PrimaryLeft; - } + if let Some(item) = active_pane_item + && let Some(log_view) = item.downcast::() + { + self.log_view = Some(log_view); + return workspace::ToolbarItemLocation::PrimaryLeft; } self.log_view = None; @@ -1131,7 +1131,7 @@ impl LogStore { project: &WeakEntity, session_id: SessionId, ) -> Vec { - self.projects.get(&project).map_or(vec![], |state| { + 
self.projects.get(project).map_or(vec![], |state| { state .debug_sessions .get(&session_id) diff --git a/crates/debugger_ui/src/debugger_panel.rs b/crates/debugger_ui/src/debugger_panel.rs index 0ac419580bd7db2d17b07b002b133163497a1e9d..f81c1fff89a965ae99205d405d1afa52bdde813a 100644 --- a/crates/debugger_ui/src/debugger_panel.rs +++ b/crates/debugger_ui/src/debugger_panel.rs @@ -36,7 +36,7 @@ use settings::Settings; use std::sync::{Arc, LazyLock}; use task::{DebugScenario, TaskContext}; use tree_sitter::{Query, StreamingIterator as _}; -use ui::{ContextMenu, Divider, PopoverMenuHandle, Tooltip, prelude::*}; +use ui::{ContextMenu, Divider, PopoverMenuHandle, Tab, Tooltip, prelude::*}; use util::{ResultExt, debug_panic, maybe}; use workspace::SplitDirection; use workspace::item::SaveOptions; @@ -257,7 +257,7 @@ impl DebugPanel { .as_ref() .map(|entity| entity.downgrade()), task_context: task_context.clone(), - worktree_id: worktree_id, + worktree_id, }); }; running.resolve_scenario( @@ -386,10 +386,10 @@ impl DebugPanel { return; }; - let dap_store_handle = self.project.read(cx).dap_store().clone(); + let dap_store_handle = self.project.read(cx).dap_store(); let label = curr_session.read(cx).label(); let quirks = curr_session.read(cx).quirks(); - let adapter = curr_session.read(cx).adapter().clone(); + let adapter = curr_session.read(cx).adapter(); let binary = curr_session.read(cx).binary().cloned().unwrap(); let task_context = curr_session.read(cx).task_context().clone(); @@ -447,9 +447,9 @@ impl DebugPanel { return; }; - let dap_store_handle = self.project.read(cx).dap_store().clone(); + let dap_store_handle = self.project.read(cx).dap_store(); let label = self.label_for_child_session(&parent_session, request, cx); - let adapter = parent_session.read(cx).adapter().clone(); + let adapter = parent_session.read(cx).adapter(); let quirks = parent_session.read(cx).quirks(); let Some(mut binary) = parent_session.read(cx).binary().cloned() else { log::error!("Attempted to start a child-session without a binary"); @@ -530,10 +530,9 @@ impl DebugPanel { .active_session .as_ref() .map(|session| session.entity_id()) + && active_session_id == entity_id { - if active_session_id == entity_id { - this.active_session = this.sessions_with_children.keys().next().cloned(); - } + this.active_session = this.sessions_with_children.keys().next().cloned(); } cx.notify() }) @@ -642,14 +641,16 @@ impl DebugPanel { } }) }; + let documentation_button = || { IconButton::new("debug-open-documentation", IconName::CircleHelp) .icon_size(IconSize::Small) .on_click(move |_, _, cx| cx.open_url("https://zed.dev/docs/debugger")) .tooltip(Tooltip::text("Open Documentation")) }; + let logs_button = || { - IconButton::new("debug-open-logs", IconName::ScrollText) + IconButton::new("debug-open-logs", IconName::Notepad) .icon_size(IconSize::Small) .on_click(move |_, window, cx| { window.dispatch_action(debugger_tools::OpenDebugAdapterLogs.boxed_clone(), cx) @@ -658,16 +659,18 @@ impl DebugPanel { }; Some( - div.border_b_1() - .border_color(cx.theme().colors().border) - .p_1() + div.w_full() + .py_1() + .px_1p5() .justify_between() - .w_full() + .border_b_1() + .border_color(cx.theme().colors().border) .when(is_side, |this| this.gap_1()) .child( h_flex() + .justify_between() .child( - h_flex().gap_2().w_full().when_some( + h_flex().gap_1().w_full().when_some( active_session .as_ref() .map(|session| session.read(cx).running_state()), @@ -679,6 +682,7 @@ impl DebugPanel { let capabilities = running_state.read(cx).capabilities(cx); 
let supports_detach = running_state.read(cx).session().read(cx).is_attached(); + this.map(|this| { if thread_status == ThreadStatus::Running { this.child( @@ -686,10 +690,9 @@ impl DebugPanel { "debug-pause", IconName::DebugPause, ) - .icon_size(IconSize::XSmall) - .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::Small) .on_click(window.listener_for( - &running_state, + running_state, |this, _, _window, cx| { this.pause_thread(cx); }, @@ -698,7 +701,7 @@ impl DebugPanel { let focus_handle = focus_handle.clone(); move |window, cx| { Tooltip::for_action_in( - "Pause program", + "Pause Program", &Pause, &focus_handle, window, @@ -713,10 +716,9 @@ impl DebugPanel { "debug-continue", IconName::DebugContinue, ) - .icon_size(IconSize::XSmall) - .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::Small) .on_click(window.listener_for( - &running_state, + running_state, |this, _, _window, cx| this.continue_thread(cx), )) .disabled(thread_status != ThreadStatus::Stopped) @@ -724,7 +726,7 @@ impl DebugPanel { let focus_handle = focus_handle.clone(); move |window, cx| { Tooltip::for_action_in( - "Continue program", + "Continue Program", &Continue, &focus_handle, window, @@ -737,10 +739,9 @@ impl DebugPanel { }) .child( IconButton::new("debug-step-over", IconName::ArrowRight) - .icon_size(IconSize::XSmall) - .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::Small) .on_click(window.listener_for( - &running_state, + running_state, |this, _, _window, cx| { this.step_over(cx); }, @@ -750,7 +751,7 @@ impl DebugPanel { let focus_handle = focus_handle.clone(); move |window, cx| { Tooltip::for_action_in( - "Step over", + "Step Over", &StepOver, &focus_handle, window, @@ -764,10 +765,9 @@ impl DebugPanel { "debug-step-into", IconName::ArrowDownRight, ) - .icon_size(IconSize::XSmall) - .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::Small) .on_click(window.listener_for( - &running_state, + running_state, |this, _, _window, cx| { this.step_in(cx); }, @@ -777,7 +777,7 @@ impl DebugPanel { let focus_handle = focus_handle.clone(); move |window, cx| { Tooltip::for_action_in( - "Step in", + "Step In", &StepInto, &focus_handle, window, @@ -788,10 +788,9 @@ impl DebugPanel { ) .child( IconButton::new("debug-step-out", IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) - .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::Small) .on_click(window.listener_for( - &running_state, + running_state, |this, _, _window, cx| { this.step_out(cx); }, @@ -801,7 +800,7 @@ impl DebugPanel { let focus_handle = focus_handle.clone(); move |window, cx| { Tooltip::for_action_in( - "Step out", + "Step Out", &StepOut, &focus_handle, window, @@ -812,10 +811,10 @@ impl DebugPanel { ) .child(Divider::vertical()) .child( - IconButton::new("debug-restart", IconName::DebugRestart) - .icon_size(IconSize::XSmall) + IconButton::new("debug-restart", IconName::RotateCcw) + .icon_size(IconSize::Small) .on_click(window.listener_for( - &running_state, + running_state, |this, _, window, cx| { this.rerun_session(window, cx); }, @@ -835,9 +834,9 @@ impl DebugPanel { ) .child( IconButton::new("debug-stop", IconName::Power) - .icon_size(IconSize::XSmall) + .icon_size(IconSize::Small) .on_click(window.listener_for( - &running_state, + running_state, |this, _, _window, cx| { if this.session().read(cx).is_building() { this.session().update(cx, |session, cx| { @@ -890,9 +889,9 @@ impl DebugPanel { thread_status != ThreadStatus::Stopped && thread_status != ThreadStatus::Running, ) - .icon_size(IconSize::XSmall) + 
.icon_size(IconSize::Small) .on_click(window.listener_for( - &running_state, + running_state, |this, _, _, cx| { this.detach_client(cx); }, @@ -915,7 +914,6 @@ impl DebugPanel { }, ), ) - .justify_around() .when(is_side, |this| { this.child(new_session_button()) .child(logs_button()) @@ -924,7 +922,7 @@ impl DebugPanel { ) .child( h_flex() - .gap_2() + .gap_0p5() .when(is_side, |this| this.justify_between()) .child( h_flex().when_some( @@ -934,7 +932,6 @@ impl DebugPanel { .cloned(), |this, running_state| { this.children({ - let running_state = running_state.clone(); let threads = running_state.update(cx, |running_state, cx| { let session = running_state.session(); @@ -954,12 +951,15 @@ impl DebugPanel { ) }) }) - .when(!is_side, |this| this.gap_2().child(Divider::vertical())) + .when(!is_side, |this| { + this.gap_0p5().child(Divider::vertical()) + }) }, ), ) .child( h_flex() + .gap_0p5() .children(self.render_session_menu( self.active_session(), self.running_state(cx), @@ -1158,7 +1158,7 @@ impl DebugPanel { workspace .project() .read(cx) - .project_path_for_absolute_path(&path, cx) + .project_path_for_absolute_path(path, cx) .context( "Couldn't get project path for .zed/debug.json in active worktree", ) @@ -1300,10 +1300,10 @@ impl DebugPanel { cx: &mut Context<'_, Self>, ) -> Option { let adapter = parent_session.read(cx).adapter(); - if let Some(adapter) = DapRegistry::global(cx).adapter(&adapter) { - if let Some(label) = adapter.label_for_child_session(request) { - return Some(label.into()); - } + if let Some(adapter) = DapRegistry::global(cx).adapter(&adapter) + && let Some(label) = adapter.label_for_child_session(request) + { + return Some(label.into()); } None } @@ -1644,7 +1644,6 @@ impl Render for DebugPanel { } }) .on_action({ - let this = this.clone(); move |_: &ToggleSessionPicker, window, cx| { this.update(cx, |this, cx| { this.toggle_session_picker(window, cx); @@ -1702,6 +1701,7 @@ impl Render for DebugPanel { this.child(active_session) } else { let docked_to_bottom = self.position(window, cx) == DockPosition::Bottom; + let welcome_experience = v_flex() .when_else( docked_to_bottom, @@ -1767,54 +1767,58 @@ impl Render for DebugPanel { ); }), ); - let breakpoint_list = - v_flex() - .group("base-breakpoint-list") - .items_start() - .when_else( - docked_to_bottom, - |this| this.min_w_1_3().h_full(), - |this| this.w_full().h_2_3(), - ) - .p_1() - .child( - h_flex() - .pl_1() - .w_full() - .justify_between() - .child(Label::new("Breakpoints").size(LabelSize::Small)) - .child(h_flex().visible_on_hover("base-breakpoint-list").child( + + let breakpoint_list = v_flex() + .group("base-breakpoint-list") + .when_else( + docked_to_bottom, + |this| this.min_w_1_3().h_full(), + |this| this.size_full().h_2_3(), + ) + .child( + h_flex() + .track_focus(&self.breakpoint_list.focus_handle(cx)) + .h(Tab::container_height(cx)) + .p_1p5() + .w_full() + .justify_between() + .border_b_1() + .border_color(cx.theme().colors().border_variant) + .child(Label::new("Breakpoints").size(LabelSize::Small)) + .child( + h_flex().visible_on_hover("base-breakpoint-list").child( self.breakpoint_list.read(cx).render_control_strip(), - )) - .track_focus(&self.breakpoint_list.focus_handle(cx)), - ) - .child(Divider::horizontal()) - .child(self.breakpoint_list.clone()); + ), + ), + ) + .child(self.breakpoint_list.clone()); + this.child( v_flex() - .h_full() + .size_full() .gap_1() .items_center() .justify_center() - .child( - div() - .when_else(docked_to_bottom, Div::h_flex, Div::v_flex) - .size_full() - .map(|this| 
{ - if docked_to_bottom { - this.items_start() - .child(breakpoint_list) - .child(Divider::vertical()) - .child(welcome_experience) - .child(Divider::vertical()) - } else { - this.items_end() - .child(welcome_experience) - .child(Divider::horizontal()) - .child(breakpoint_list) - } - }), - ), + .map(|this| { + if docked_to_bottom { + this.child( + h_flex() + .size_full() + .child(breakpoint_list) + .child(Divider::vertical()) + .child(welcome_experience) + .child(Divider::vertical()), + ) + } else { + this.child( + v_flex() + .size_full() + .child(welcome_experience) + .child(Divider::horizontal()) + .child(breakpoint_list), + ) + } + }), ) } }) diff --git a/crates/debugger_ui/src/debugger_ui.rs b/crates/debugger_ui/src/debugger_ui.rs index 5f5dfd1a1e6a543cdb7a4d87e1b8e9984c4ecba9..581cc16ff4c9a41e24036bcd3ff000ad47d3a076 100644 --- a/crates/debugger_ui/src/debugger_ui.rs +++ b/crates/debugger_ui/src/debugger_ui.rs @@ -272,7 +272,6 @@ pub fn init(cx: &mut App) { } }) .on_action({ - let active_item = active_item.clone(); move |_: &ToggleIgnoreBreakpoints, _, cx| { active_item .update(cx, |item, cx| item.toggle_ignore_breakpoints(cx)) @@ -293,9 +292,8 @@ pub fn init(cx: &mut App) { let Some(debug_panel) = workspace.read(cx).panel::(cx) else { return; }; - let Some(active_session) = debug_panel - .clone() - .update(cx, |panel, _| panel.active_session()) + let Some(active_session) = + debug_panel.update(cx, |panel, _| panel.active_session()) else { return; }; diff --git a/crates/debugger_ui/src/dropdown_menus.rs b/crates/debugger_ui/src/dropdown_menus.rs index dca15eb0527cfc78bd137889a1910e6b32abf98c..c5399f6f69648dcfa775a6dd6da62bd637124f2c 100644 --- a/crates/debugger_ui/src/dropdown_menus.rs +++ b/crates/debugger_ui/src/dropdown_menus.rs @@ -272,10 +272,9 @@ impl DebugPanel { .child(session_entry.label_element(self_depth, cx)) .child( IconButton::new("close-debug-session", IconName::Close) - .visible_on_hover(id.clone()) + .visible_on_hover(id) .icon_size(IconSize::Small) .on_click({ - let weak = weak.clone(); move |_, window, cx| { weak.update(cx, |panel, cx| { panel.close_session(session_entity_id, window, cx); diff --git a/crates/debugger_ui/src/new_process_modal.rs b/crates/debugger_ui/src/new_process_modal.rs index 4ac8e371a15052a00ed962480a9f694a8802007c..b30e3995ffdb994fdb9c936821b360ef7e6eff04 100644 --- a/crates/debugger_ui/src/new_process_modal.rs +++ b/crates/debugger_ui/src/new_process_modal.rs @@ -343,10 +343,10 @@ impl NewProcessModal { return; } - if let NewProcessMode::Launch = &self.mode { - if self.configure_mode.read(cx).save_to_debug_json.selected() { - self.save_debug_scenario(window, cx); - } + if let NewProcessMode::Launch = &self.mode + && self.configure_mode.read(cx).save_to_debug_json.selected() + { + self.save_debug_scenario(window, cx); } let Some(debugger) = self.debugger.clone() else { @@ -413,7 +413,7 @@ impl NewProcessModal { let Some(adapter) = self.debugger.as_ref() else { return; }; - let scenario = self.debug_scenario(&adapter, cx); + let scenario = self.debug_scenario(adapter, cx); cx.spawn_in(window, async move |this, cx| { let scenario = scenario.await.context("no scenario to save")?; let worktree_id = task_contexts @@ -659,12 +659,7 @@ impl Render for NewProcessModal { this.mode = NewProcessMode::Attach; if let Some(debugger) = this.debugger.as_ref() { - Self::update_attach_picker( - &this.attach_mode, - &debugger, - window, - cx, - ); + Self::update_attach_picker(&this.attach_mode, debugger, window, cx); } this.mode_focus_handle(cx).focus(window); 
cx.notify(); @@ -790,7 +785,7 @@ impl RenderOnce for AttachMode { v_flex() .w_full() .track_focus(&self.attach_picker.focus_handle(cx)) - .child(self.attach_picker.clone()) + .child(self.attach_picker) } } @@ -1083,7 +1078,7 @@ impl DebugDelegate { .into_iter() .map(|(scenario, context)| { let (kind, scenario) = - Self::get_scenario_kind(&languages, &dap_registry, scenario); + Self::get_scenario_kind(&languages, dap_registry, scenario); (kind, scenario, Some(context)) }) .chain( @@ -1100,7 +1095,7 @@ impl DebugDelegate { .filter(|(_, scenario)| valid_adapters.contains(&scenario.adapter)) .map(|(kind, scenario)| { let (language, scenario) = - Self::get_scenario_kind(&languages, &dap_registry, scenario); + Self::get_scenario_kind(&languages, dap_registry, scenario); (language.or(Some(kind)), scenario, None) }), ) diff --git a/crates/debugger_ui/src/onboarding_modal.rs b/crates/debugger_ui/src/onboarding_modal.rs index c9fa0099406ebbee1cd48a54e4ece483c74406d8..2a9f68d0c9e584e9674fce228c9597c3d8fe8dec 100644 --- a/crates/debugger_ui/src/onboarding_modal.rs +++ b/crates/debugger_ui/src/onboarding_modal.rs @@ -131,7 +131,7 @@ impl Render for DebuggerOnboardingModal { .child(Headline::new("Zed's Debugger").size(HeadlineSize::Large)), ) .child(h_flex().absolute().top_2().right_2().child( - IconButton::new("cancel", IconName::X).on_click(cx.listener( + IconButton::new("cancel", IconName::Close).on_click(cx.listener( |_, _: &ClickEvent, _window, cx| { debugger_onboarding_event!("Cancelled", trigger = "X click"); cx.emit(DismissEvent); diff --git a/crates/debugger_ui/src/persistence.rs b/crates/debugger_ui/src/persistence.rs index 3a0ad7a40e60d4dc28f2086b94a0a43186978542..cff2ba83355208d702cf7936c46ea5b167c7c649 100644 --- a/crates/debugger_ui/src/persistence.rs +++ b/crates/debugger_ui/src/persistence.rs @@ -256,7 +256,7 @@ pub(crate) fn deserialize_pane_layout( Some(Member::Axis(PaneAxis::load( if should_invert { axis.invert() } else { axis }, members, - flexes.clone(), + flexes, ))) } SerializedPaneLayout::Pane(serialized_pane) => { @@ -341,7 +341,7 @@ impl SerializedPaneLayout { pub(crate) fn in_order(&self) -> Vec { let mut panes = vec![]; - Self::inner_in_order(&self, &mut panes); + Self::inner_in_order(self, &mut panes); panes } diff --git a/crates/debugger_ui/src/session.rs b/crates/debugger_ui/src/session.rs index 73cfef78cc6410196441ff974f09b5abe3d86916..0fc003a14dd9ac51c2608df86644a628a44b3e8e 100644 --- a/crates/debugger_ui/src/session.rs +++ b/crates/debugger_ui/src/session.rs @@ -87,7 +87,7 @@ impl DebugSession { self.stack_trace_view.get_or_init(|| { let stackframe_list = running_state.read(cx).stack_frame_list().clone(); - let stack_frame_view = cx.new(|cx| { + cx.new(|cx| { StackTraceView::new( workspace.clone(), project.clone(), @@ -95,9 +95,7 @@ impl DebugSession { window, cx, ) - }); - - stack_frame_view + }) }) } diff --git a/crates/debugger_ui/src/session/running.rs b/crates/debugger_ui/src/session/running.rs index f2f9e17d8981d8c8b73ccd4e3caad51f5278eb7d..0574091851f8f99e10ab8d1f7ec769177826e41a 100644 --- a/crates/debugger_ui/src/session/running.rs +++ b/crates/debugger_ui/src/session/running.rs @@ -48,10 +48,8 @@ use task::{ }; use terminal_view::TerminalView; use ui::{ - ActiveTheme, AnyElement, App, ButtonCommon as _, Clickable as _, Context, FluentBuilder, - IconButton, IconName, IconSize, InteractiveElement, IntoElement, Label, LabelCommon as _, - ParentElement, Render, SharedString, StatefulInteractiveElement, Styled, Tab, Tooltip, - VisibleOnHover, VisualContext, 
Window, div, h_flex, v_flex, + FluentBuilder, IntoElement, Render, StatefulInteractiveElement, Tab, Tooltip, VisibleOnHover, + VisualContext, prelude::*, }; use util::ResultExt; use variable_list::VariableList; @@ -104,7 +102,7 @@ impl Render for RunningState { .find(|pane| pane.read(cx).is_zoomed()); let active = self.panes.panes().into_iter().next(); - let pane = if let Some(ref zoomed_pane) = zoomed_pane { + let pane = if let Some(zoomed_pane) = zoomed_pane { zoomed_pane.update(cx, |pane, cx| pane.render(window, cx).into_any_element()) } else if let Some(active) = active { self.panes @@ -182,7 +180,7 @@ impl SubView { let weak_list = list.downgrade(); let focus_handle = list.focus_handle(cx); let this = Self::new( - focus_handle.clone(), + focus_handle, list.into(), DebuggerPaneItem::BreakpointList, cx, @@ -293,7 +291,7 @@ pub(crate) fn new_debugger_pane( let Some(project) = project.upgrade() else { return ControlFlow::Break(()); }; - let this_pane = cx.entity().clone(); + let this_pane = cx.entity(); let item = if tab.pane == this_pane { pane.item_for_index(tab.ix) } else { @@ -360,7 +358,7 @@ pub(crate) fn new_debugger_pane( } }; - let ret = cx.new(move |cx| { + cx.new(move |cx| { let mut pane = Pane::new( workspace.clone(), project.clone(), @@ -416,19 +414,19 @@ pub(crate) fn new_debugger_pane( .and_then(|item| item.downcast::()); let is_hovered = as_subview .as_ref() - .map_or(false, |item| item.read(cx).hovered); + .is_some_and(|item| item.read(cx).hovered); h_flex() + .track_focus(&focus_handle) .group(pane_group_id.clone()) + .pl_1p5() + .pr_1() .justify_between() - .bg(cx.theme().colors().tab_bar_background) .border_b_1() - .px_2() .border_color(cx.theme().colors().border) - .track_focus(&focus_handle) + .bg(cx.theme().colors().tab_bar_background) .on_action(|_: &menu::Cancel, window, cx| { if cx.stop_active_drag(window) { - return; } else { cx.propagate(); } @@ -450,7 +448,7 @@ pub(crate) fn new_debugger_pane( .children(pane.items().enumerate().map(|(ix, item)| { let selected = active_pane_item .as_ref() - .map_or(false, |active| active.item_id() == item.item_id()); + .is_some_and(|active| active.item_id() == item.item_id()); let deemphasized = !pane.has_focus(window, cx); let item_ = item.boxed_clone(); div() @@ -503,7 +501,7 @@ pub(crate) fn new_debugger_pane( .on_drag( DraggedTab { item: item.boxed_clone(), - pane: cx.entity().clone(), + pane: cx.entity(), detail: 0, is_active: selected, ix, @@ -514,6 +512,7 @@ pub(crate) fn new_debugger_pane( ) .child({ let zoomed = pane.is_zoomed(); + h_flex() .visible_on_hover(pane_group_id) .when(is_hovered, |this| this.visible()) @@ -537,7 +536,7 @@ pub(crate) fn new_debugger_pane( IconName::Maximize }, ) - .icon_size(IconSize::XSmall) + .icon_size(IconSize::Small) .on_click(cx.listener(move |pane, _, _, cx| { let is_zoomed = pane.is_zoomed(); pane.set_zoomed(!is_zoomed, cx); @@ -563,9 +562,7 @@ pub(crate) fn new_debugger_pane( } }); pane - }); - - ret + }) } pub struct DebugTerminal { @@ -592,10 +589,11 @@ impl DebugTerminal { } impl gpui::Render for DebugTerminal { - fn render(&mut self, _window: &mut Window, _: &mut Context) -> impl IntoElement { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { div() - .size_full() .track_focus(&self.focus_handle) + .size_full() + .bg(cx.theme().colors().editor_background) .children(self.terminal.clone()) } } @@ -626,7 +624,7 @@ impl RunningState { if s.starts_with("\"$ZED_") && s.ends_with('"') { *s = s[1..s.len() - 1].to_string(); } - if let Some(substituted) = 
substitute_variables_in_str(&s, context) { + if let Some(substituted) = substitute_variables_in_str(s, context) { *s = substituted; } } @@ -656,7 +654,7 @@ impl RunningState { } resolve_path(s); - if let Some(substituted) = substitute_variables_in_str(&s, context) { + if let Some(substituted) = substitute_variables_in_str(s, context) { *s = substituted; } } @@ -953,7 +951,7 @@ impl RunningState { inventory.read(cx).task_template_by_label( buffer, worktree_id, - &label, + label, cx, ) }) @@ -1014,10 +1012,9 @@ impl RunningState { ..task.resolved.clone() }; let terminal = project - .update_in(cx, |project, window, cx| { + .update(cx, |project, cx| { project.create_terminal( TerminalKind::Task(task_with_shell.clone()), - window.window_handle(), cx, ) })? @@ -1116,9 +1113,8 @@ impl RunningState { }; let session = self.session.read(cx); - let cwd = Some(&request.cwd) - .filter(|cwd| cwd.len() > 0) - .map(PathBuf::from) + let cwd = (!request.cwd.is_empty()) + .then(|| PathBuf::from(&request.cwd)) .or_else(|| session.binary().unwrap().cwd.clone()); let mut envs: HashMap = @@ -1153,7 +1149,7 @@ impl RunningState { } else { None } - } else if args.len() > 0 { + } else if !args.is_empty() { Some(args.remove(0)) } else { None @@ -1170,9 +1166,9 @@ impl RunningState { id: task::TaskId("debug".to_string()), full_label: title.clone(), label: title.clone(), - command: command.clone(), + command, args, - command_label: title.clone(), + command_label: title, cwd, env: envs, use_new_terminal: true, @@ -1189,9 +1185,7 @@ impl RunningState { let workspace = self.workspace.clone(); let weak_project = project.downgrade(); - let terminal_task = project.update(cx, |project, cx| { - project.create_terminal(kind, window.window_handle(), cx) - }); + let terminal_task = project.update(cx, |project, cx| project.create_terminal(kind, cx)); let terminal_task = cx.spawn_in(window, async move |_, cx| { let terminal = terminal_task.await?; @@ -1312,7 +1306,7 @@ impl RunningState { let mut pane_item_status = IndexMap::from_iter( DebuggerPaneItem::all() .iter() - .filter(|kind| kind.is_supported(&caps)) + .filter(|kind| kind.is_supported(caps)) .map(|kind| (*kind, false)), ); self.panes.panes().iter().for_each(|pane| { @@ -1373,7 +1367,7 @@ impl RunningState { this.serialize_layout(window, cx); match event { Event::Remove { .. 
} => { - let _did_find_pane = this.panes.remove(&source_pane).is_ok(); + let _did_find_pane = this.panes.remove(source_pane).is_ok(); debug_assert!(_did_find_pane); cx.notify(); } @@ -1761,7 +1755,7 @@ impl RunningState { this.activate_item(0, false, false, window, cx); }); - let rightmost_pane = new_debugger_pane(workspace.clone(), project.clone(), window, cx); + let rightmost_pane = new_debugger_pane(workspace.clone(), project, window, cx); rightmost_pane.update(cx, |this, cx| { this.add_item( Box::new(SubView::new( diff --git a/crates/debugger_ui/src/session/running/breakpoint_list.rs b/crates/debugger_ui/src/session/running/breakpoint_list.rs index a6defbbf35cc103025c286b9875285b924032ed0..233dba4c52e28c6e1f1b9205cb7481d487040fb1 100644 --- a/crates/debugger_ui/src/session/running/breakpoint_list.rs +++ b/crates/debugger_ui/src/session/running/breakpoint_list.rs @@ -23,11 +23,8 @@ use project::{ worktree_store::WorktreeStore, }; use ui::{ - ActiveTheme, AnyElement, App, ButtonCommon, Clickable, Color, Context, Disableable, Div, - Divider, FluentBuilder as _, Icon, IconButton, IconName, IconSize, InteractiveElement, - IntoElement, Label, LabelCommon, LabelSize, ListItem, ParentElement, Render, RenderOnce, - Scrollbar, ScrollbarState, SharedString, StatefulInteractiveElement, Styled, Toggleable, - Tooltip, Window, div, h_flex, px, v_flex, + Divider, DividerColor, FluentBuilder as _, Indicator, IntoElement, ListItem, Render, Scrollbar, + ScrollbarState, StatefulInteractiveElement, Tooltip, prelude::*, }; use workspace::Workspace; use zed_actions::{ToggleEnableBreakpoint, UnsetBreakpoint}; @@ -242,14 +239,12 @@ impl BreakpointList { } fn select_next(&mut self, _: &menu::SelectNext, window: &mut Window, cx: &mut Context) { - if self.strip_mode.is_some() { - if self.input.focus_handle(cx).contains_focused(window, cx) { - cx.propagate(); - return; - } + if self.strip_mode.is_some() && self.input.focus_handle(cx).contains_focused(window, cx) { + cx.propagate(); + return; } let ix = match self.selected_ix { - _ if self.breakpoints.len() == 0 => None, + _ if self.breakpoints.is_empty() => None, None => Some(0), Some(ix) => { if ix == self.breakpoints.len() - 1 { @@ -268,14 +263,12 @@ impl BreakpointList { window: &mut Window, cx: &mut Context, ) { - if self.strip_mode.is_some() { - if self.input.focus_handle(cx).contains_focused(window, cx) { - cx.propagate(); - return; - } + if self.strip_mode.is_some() && self.input.focus_handle(cx).contains_focused(window, cx) { + cx.propagate(); + return; } let ix = match self.selected_ix { - _ if self.breakpoints.len() == 0 => None, + _ if self.breakpoints.is_empty() => None, None => Some(self.breakpoints.len() - 1), Some(ix) => { if ix == 0 { @@ -289,13 +282,11 @@ impl BreakpointList { } fn select_first(&mut self, _: &menu::SelectFirst, window: &mut Window, cx: &mut Context) { - if self.strip_mode.is_some() { - if self.input.focus_handle(cx).contains_focused(window, cx) { - cx.propagate(); - return; - } + if self.strip_mode.is_some() && self.input.focus_handle(cx).contains_focused(window, cx) { + cx.propagate(); + return; } - let ix = if self.breakpoints.len() > 0 { + let ix = if !self.breakpoints.is_empty() { Some(0) } else { None @@ -304,13 +295,11 @@ impl BreakpointList { } fn select_last(&mut self, _: &menu::SelectLast, window: &mut Window, cx: &mut Context) { - if self.strip_mode.is_some() { - if self.input.focus_handle(cx).contains_focused(window, cx) { - cx.propagate(); - return; - } + if self.strip_mode.is_some() && 
self.input.focus_handle(cx).contains_focused(window, cx) { + cx.propagate(); + return; } - let ix = if self.breakpoints.len() > 0 { + let ix = if !self.breakpoints.is_empty() { Some(self.breakpoints.len() - 1) } else { None @@ -340,8 +329,8 @@ impl BreakpointList { let text = self.input.read(cx).text(cx); match mode { - ActiveBreakpointStripMode::Log => match &entry.kind { - BreakpointEntryKind::LineBreakpoint(line_breakpoint) => { + ActiveBreakpointStripMode::Log => { + if let BreakpointEntryKind::LineBreakpoint(line_breakpoint) = &entry.kind { Self::edit_line_breakpoint_inner( &self.breakpoint_store, line_breakpoint.breakpoint.path.clone(), @@ -350,10 +339,9 @@ impl BreakpointList { cx, ); } - _ => {} - }, - ActiveBreakpointStripMode::Condition => match &entry.kind { - BreakpointEntryKind::LineBreakpoint(line_breakpoint) => { + } + ActiveBreakpointStripMode::Condition => { + if let BreakpointEntryKind::LineBreakpoint(line_breakpoint) = &entry.kind { Self::edit_line_breakpoint_inner( &self.breakpoint_store, line_breakpoint.breakpoint.path.clone(), @@ -362,10 +350,9 @@ impl BreakpointList { cx, ); } - _ => {} - }, - ActiveBreakpointStripMode::HitCondition => match &entry.kind { - BreakpointEntryKind::LineBreakpoint(line_breakpoint) => { + } + ActiveBreakpointStripMode::HitCondition => { + if let BreakpointEntryKind::LineBreakpoint(line_breakpoint) = &entry.kind { Self::edit_line_breakpoint_inner( &self.breakpoint_store, line_breakpoint.breakpoint.path.clone(), @@ -374,8 +361,7 @@ impl BreakpointList { cx, ); } - _ => {} - }, + } } self.focus_handle.focus(window); } else { @@ -404,11 +390,9 @@ impl BreakpointList { let Some(entry) = self.selected_ix.and_then(|ix| self.breakpoints.get_mut(ix)) else { return; }; - if self.strip_mode.is_some() { - if self.input.focus_handle(cx).contains_focused(window, cx) { - cx.propagate(); - return; - } + if self.strip_mode.is_some() && self.input.focus_handle(cx).contains_focused(window, cx) { + cx.propagate(); + return; } match &mut entry.kind { @@ -439,13 +423,10 @@ impl BreakpointList { return; }; - match &mut entry.kind { - BreakpointEntryKind::LineBreakpoint(line_breakpoint) => { - let path = line_breakpoint.breakpoint.path.clone(); - let row = line_breakpoint.breakpoint.row; - self.edit_line_breakpoint(path, row, BreakpointEditAction::Toggle, cx); - } - _ => {} + if let BreakpointEntryKind::LineBreakpoint(line_breakpoint) = &mut entry.kind { + let path = line_breakpoint.breakpoint.path.clone(); + let row = line_breakpoint.breakpoint.row; + self.edit_line_breakpoint(path, row, BreakpointEditAction::Toggle, cx); } cx.notify(); } @@ -497,7 +478,7 @@ impl BreakpointList { fn toggle_data_breakpoint(&mut self, id: &str, cx: &mut Context) { if let Some(session) = &self.session { session.update(cx, |this, cx| { - this.toggle_data_breakpoint(&id, cx); + this.toggle_data_breakpoint(id, cx); }); } } @@ -505,7 +486,7 @@ impl BreakpointList { fn toggle_exception_breakpoint(&mut self, id: &str, cx: &mut Context) { if let Some(session) = &self.session { session.update(cx, |this, cx| { - this.toggle_exception_breakpoint(&id, cx); + this.toggle_exception_breakpoint(id, cx); }); cx.notify(); const EXCEPTION_SERIALIZATION_INTERVAL: Duration = Duration::from_secs(1); @@ -541,7 +522,7 @@ impl BreakpointList { cx.background_executor() .spawn(async move { KEY_VALUE_STORE.write_kvp(key, value?).await }) } else { - return Task::ready(Result::Ok(())); + Task::ready(Result::Ok(())) } } @@ -569,6 +550,7 @@ impl BreakpointList { .map(|session| 
SupportedBreakpointProperties::from(session.read(cx).capabilities())) .unwrap_or_else(SupportedBreakpointProperties::empty); let strip_mode = self.strip_mode; + uniform_list( "breakpoint-list", self.breakpoints.len(), @@ -591,7 +573,7 @@ impl BreakpointList { }), ) .track_scroll(self.scroll_handle.clone()) - .flex_grow() + .flex_1() } fn render_vertical_scrollbar(&self, cx: &mut Context<Self>) -> Stateful<Div>
{ @@ -630,6 +612,7 @@ impl BreakpointList { pub(crate) fn render_control_strip(&self) -> AnyElement { let selection_kind = self.selection_kind(); let focus_handle = self.focus_handle.clone(); + let remove_breakpoint_tooltip = selection_kind.map(|(kind, _)| match kind { SelectedBreakpointKind::Source => "Remove breakpoint from a breakpoint list", SelectedBreakpointKind::Exception => { @@ -637,6 +620,7 @@ impl BreakpointList { } SelectedBreakpointKind::Data => "Remove data breakpoint from a breakpoint list", }); + let toggle_label = selection_kind.map(|(_, is_enabled)| { if is_enabled { ( @@ -649,13 +633,12 @@ impl BreakpointList { }); h_flex() - .gap_2() .child( IconButton::new( "disable-breakpoint-breakpoint-list", IconName::DebugDisabledBreakpoint, ) - .icon_size(IconSize::XSmall) + .icon_size(IconSize::Small) .when_some(toggle_label, |this, (label, meta)| { this.tooltip({ let focus_handle = focus_handle.clone(); @@ -681,9 +664,8 @@ impl BreakpointList { }), ) .child( - IconButton::new("remove-breakpoint-breakpoint-list", IconName::X) - .icon_size(IconSize::XSmall) - .icon_color(ui::Color::Error) + IconButton::new("remove-breakpoint-breakpoint-list", IconName::Trash) + .icon_size(IconSize::Small) .when_some(remove_breakpoint_tooltip, |this, tooltip| { this.tooltip({ let focus_handle = focus_handle.clone(); @@ -703,14 +685,12 @@ impl BreakpointList { selection_kind.map(|kind| kind.0) != Some(SelectedBreakpointKind::Source), ) .on_click({ - let focus_handle = focus_handle.clone(); move |_, window, cx| { focus_handle.focus(window); window.dispatch_action(UnsetBreakpoint.boxed_clone(), cx) } }), ) - .mr_2() .into_any_element() } } @@ -791,6 +771,7 @@ impl Render for BreakpointList { .chain(data_breakpoints) .chain(exception_breakpoints), ); + v_flex() .id("breakpoint-list") .key_context("BreakpointList") @@ -806,35 +787,33 @@ impl Render for BreakpointList { .on_action(cx.listener(Self::next_breakpoint_property)) .on_action(cx.listener(Self::previous_breakpoint_property)) .size_full() - .m_0p5() - .child( - v_flex() - .size_full() - .child(self.render_list(cx)) - .child(self.render_vertical_scrollbar(cx)), - ) + .pt_1() + .child(self.render_list(cx)) + .child(self.render_vertical_scrollbar(cx)) .when_some(self.strip_mode, |this, _| { - this.child(Divider::horizontal()).child( - h_flex() - // .w_full() - .m_0p5() - .p_0p5() - .border_1() - .rounded_sm() - .when( - self.input.focus_handle(cx).contains_focused(window, cx), - |this| { - let colors = cx.theme().colors(); - let border = if self.input.read(cx).read_only(cx) { - colors.border_disabled - } else { - colors.border_focused - }; - this.border_color(border) - }, - ) - .child(self.input.clone()), - ) + this.child(Divider::horizontal().color(DividerColor::Border)) + .child( + h_flex() + .p_1() + .rounded_sm() + .bg(cx.theme().colors().editor_background) + .border_1() + .when( + self.input.focus_handle(cx).contains_focused(window, cx), + |this| { + let colors = cx.theme().colors(); + + let border_color = if self.input.read(cx).read_only(cx) { + colors.border_disabled + } else { + colors.border_transparent + }; + + this.border_color(border_color) + }, + ) + .child(self.input.clone()), + ) }) } } @@ -865,12 +844,17 @@ impl LineBreakpoint { let path = self.breakpoint.path.clone(); let row = self.breakpoint.row; let is_enabled = self.breakpoint.state.is_enabled(); + let indicator = div() .id(SharedString::from(format!( "breakpoint-ui-toggle-{:?}/{}:{}", self.dir, self.name, self.line ))) - .cursor_pointer() + .child( + Icon::new(icon_name) + 
.color(Color::Debugger) + .size(IconSize::XSmall), + ) .tooltip({ let focus_handle = focus_handle.clone(); move |window, cx| { @@ -902,17 +886,14 @@ impl LineBreakpoint { .ok(); } }) - .child( - Icon::new(icon_name) - .color(Color::Debugger) - .size(IconSize::XSmall), - ) .on_mouse_down(MouseButton::Left, move |_, _, _| {}); ListItem::new(SharedString::from(format!( "breakpoint-ui-item-{:?}/{}:{}", self.dir, self.name, self.line ))) + .toggle_state(is_selected) + .inset(true) .on_click({ let weak = weak.clone(); move |_, window, cx| { @@ -922,23 +903,20 @@ impl LineBreakpoint { .ok(); } }) - .start_slot(indicator) - .rounded() .on_secondary_mouse_down(|_, _, cx| { cx.stop_propagation(); }) + .start_slot(indicator) .child( h_flex() - .w_full() - .mr_4() - .py_0p5() - .gap_1() - .min_h(px(26.)) - .justify_between() .id(SharedString::from(format!( "breakpoint-ui-on-click-go-to-line-{:?}/{}:{}", self.dir, self.name, self.line ))) + .w_full() + .gap_1() + .min_h(rems_from_px(26.)) + .justify_between() .on_click({ let weak = weak.clone(); move |_, window, cx| { @@ -949,9 +927,9 @@ impl LineBreakpoint { .ok(); } }) - .cursor_pointer() .child( h_flex() + .id("label-container") .gap_0p5() .child( Label::new(format!("{}:{}", self.name, self.line)) @@ -971,16 +949,18 @@ impl LineBreakpoint { .line_height_style(ui::LineHeightStyle::UiLabel) .truncate(), ) - })), + })) + .when_some(self.dir.as_ref(), |this, parent_dir| { + this.tooltip(Tooltip::text(format!( + "Worktree parent path: {parent_dir}" + ))) + }), ) - .when_some(self.dir.as_ref(), |this, parent_dir| { - this.tooltip(Tooltip::text(format!("Worktree parent path: {parent_dir}"))) - }) .child(BreakpointOptionsStrip { props, breakpoint: BreakpointEntry { kind: BreakpointEntryKind::LineBreakpoint(self.clone()), - weak: weak, + weak, }, is_selected, focus_handle, @@ -988,15 +968,16 @@ impl LineBreakpoint { index: ix, }), ) - .toggle_state(is_selected) } } + #[derive(Clone, Debug)] struct ExceptionBreakpoint { id: String, data: ExceptionBreakpointsFilter, is_enabled: bool, } + #[derive(Clone, Debug)] struct DataBreakpoint(project::debugger::session::DataBreakpointState); @@ -1017,17 +998,24 @@ impl DataBreakpoint { }; let is_enabled = self.0.is_enabled; let id = self.0.dap.data_id.clone(); + ListItem::new(SharedString::from(format!( "data-breakpoint-ui-item-{}", self.0.dap.data_id ))) - .rounded() + .toggle_state(is_selected) + .inset(true) .start_slot( div() .id(SharedString::from(format!( "data-breakpoint-ui-item-{}-click-handler", self.0.dap.data_id ))) + .child( + Icon::new(IconName::Binary) + .color(color) + .size(IconSize::Small), + ) .tooltip({ let focus_handle = focus_handle.clone(); move |window, cx| { @@ -1052,25 +1040,18 @@ impl DataBreakpoint { }) .ok(); } - }) - .cursor_pointer() - .child( - Icon::new(IconName::Binary) - .color(color) - .size(IconSize::Small), - ), + }), ) .child( h_flex() .w_full() - .mr_4() - .py_0p5() + .gap_1() + .min_h(rems_from_px(26.)) .justify_between() .child( v_flex() .py_1() .gap_1() - .min_h(px(26.)) .justify_center() .id(("data-breakpoint-label", ix)) .child( @@ -1091,7 +1072,6 @@ impl DataBreakpoint { index: ix, }), ) - .toggle_state(is_selected) } } @@ -1113,10 +1093,13 @@ impl ExceptionBreakpoint { let id = SharedString::from(&self.id); let is_enabled = self.is_enabled; let weak = list.clone(); + ListItem::new(SharedString::from(format!( "exception-breakpoint-ui-item-{}", self.id ))) + .toggle_state(is_selected) + .inset(true) .on_click({ let list = list.clone(); move |_, window, cx| { @@ -1124,7 +1107,6 
@@ impl ExceptionBreakpoint { .ok(); } }) - .rounded() .on_secondary_mouse_down(|_, _, cx| { cx.stop_propagation(); }) @@ -1134,6 +1116,11 @@ impl ExceptionBreakpoint { "exception-breakpoint-ui-item-{}-click-handler", self.id ))) + .child( + Icon::new(IconName::Flame) + .color(color) + .size(IconSize::Small), + ) .tooltip({ let focus_handle = focus_handle.clone(); move |window, cx| { @@ -1151,32 +1138,24 @@ impl ExceptionBreakpoint { } }) .on_click({ - let list = list.clone(); move |_, _, cx| { list.update(cx, |this, cx| { this.toggle_exception_breakpoint(&id, cx); }) .ok(); } - }) - .cursor_pointer() - .child( - Icon::new(IconName::Flame) - .color(color) - .size(IconSize::Small), - ), + }), ) .child( h_flex() .w_full() - .mr_4() - .py_0p5() + .gap_1() + .min_h(rems_from_px(26.)) .justify_between() .child( v_flex() .py_1() .gap_1() - .min_h(px(26.)) .justify_center() .id(("exception-breakpoint-label", ix)) .child( @@ -1192,7 +1171,7 @@ impl ExceptionBreakpoint { props, breakpoint: BreakpointEntry { kind: BreakpointEntryKind::ExceptionBreakpoint(self.clone()), - weak: weak, + weak, }, is_selected, focus_handle, @@ -1200,7 +1179,6 @@ impl ExceptionBreakpoint { index: ix, }), ) - .toggle_state(is_selected) } } #[derive(Clone, Debug)] @@ -1302,6 +1280,7 @@ impl BreakpointEntry { } } } + bitflags::bitflags! { #[derive(Clone, Copy)] pub struct SupportedBreakpointProperties: u32 { @@ -1360,6 +1339,7 @@ impl BreakpointOptionsStrip { fn is_toggled(&self, expected_mode: ActiveBreakpointStripMode) -> bool { self.is_selected && self.strip_mode == Some(expected_mode) } + fn on_click_callback( &self, mode: ActiveBreakpointStripMode, @@ -1379,7 +1359,8 @@ impl BreakpointOptionsStrip { .ok(); } } - fn add_border( + + fn add_focus_styles( &self, kind: ActiveBreakpointStripMode, available: bool, @@ -1388,22 +1369,25 @@ impl BreakpointOptionsStrip { ) -> impl Fn(Div) -> Div { move |this: Div| { // Avoid layout shifts in case there's no colored border - let this = this.border_2().rounded_sm(); + let this = this.border_1().rounded_sm(); + let color = cx.theme().colors(); + if self.is_selected && self.strip_mode == Some(kind) { - let theme = cx.theme().colors(); if self.focus_handle.is_focused(window) { - this.border_color(theme.border_selected) + this.bg(color.editor_background) + .border_color(color.border_focused) } else { - this.border_color(theme.border_disabled) + this.border_color(color.border) } } else if !available { - this.border_color(cx.theme().colors().border_disabled) + this.border_color(color.border_transparent) } else { this } } } } + impl RenderOnce for BreakpointOptionsStrip { fn render(self, window: &mut Window, cx: &mut App) -> impl IntoElement { let id = self.breakpoint.id(); @@ -1426,73 +1410,117 @@ impl RenderOnce for BreakpointOptionsStrip { }; let color_for_toggle = |is_enabled| { if is_enabled { - ui::Color::Default + Color::Default } else { - ui::Color::Muted + Color::Muted } }; h_flex() - .gap_1() + .gap_px() + .mr_3() // Space to avoid overlapping with the scrollbar .child( - div().map(self.add_border(ActiveBreakpointStripMode::Log, supports_logs, window, cx)) + div() + .map(self.add_focus_styles( + ActiveBreakpointStripMode::Log, + supports_logs, + window, + cx, + )) .child( IconButton::new( SharedString::from(format!("{id}-log-toggle")), - IconName::ScrollText, + IconName::Notepad, ) - .icon_size(IconSize::XSmall) + .shape(ui::IconButtonShape::Square) .style(style_for_toggle(ActiveBreakpointStripMode::Log, has_logs)) + .icon_size(IconSize::Small) 
.icon_color(color_for_toggle(has_logs)) + .when(has_logs, |this| this.indicator(Indicator::dot().color(Color::Info))) .disabled(!supports_logs) .toggle_state(self.is_toggled(ActiveBreakpointStripMode::Log)) - .on_click(self.on_click_callback(ActiveBreakpointStripMode::Log)).tooltip(|window, cx| Tooltip::with_meta("Set Log Message", None, "Set log message to display (instead of stopping) when a breakpoint is hit", window, cx)) + .on_click(self.on_click_callback(ActiveBreakpointStripMode::Log)) + .tooltip(|window, cx| { + Tooltip::with_meta( + "Set Log Message", + None, + "Set log message to display (instead of stopping) when a breakpoint is hit.", + window, + cx, + ) + }), ) .when(!has_logs && !self.is_selected, |this| this.invisible()), ) .child( - div().map(self.add_border( - ActiveBreakpointStripMode::Condition, - supports_condition, - window, cx - )) + div() + .map(self.add_focus_styles( + ActiveBreakpointStripMode::Condition, + supports_condition, + window, + cx, + )) .child( IconButton::new( SharedString::from(format!("{id}-condition-toggle")), IconName::SplitAlt, ) - .icon_size(IconSize::XSmall) + .shape(ui::IconButtonShape::Square) .style(style_for_toggle( ActiveBreakpointStripMode::Condition, - has_condition + has_condition, )) + .icon_size(IconSize::Small) .icon_color(color_for_toggle(has_condition)) + .when(has_condition, |this| this.indicator(Indicator::dot().color(Color::Info))) .disabled(!supports_condition) .toggle_state(self.is_toggled(ActiveBreakpointStripMode::Condition)) .on_click(self.on_click_callback(ActiveBreakpointStripMode::Condition)) - .tooltip(|window, cx| Tooltip::with_meta("Set Condition", None, "Set condition to evaluate when a breakpoint is hit. Program execution will stop only when the condition is met", window, cx)) + .tooltip(|window, cx| { + Tooltip::with_meta( + "Set Condition", + None, + "Set condition to evaluate when a breakpoint is hit. 
Program execution will stop only when the condition is met.", + window, + cx, + ) + }), ) .when(!has_condition && !self.is_selected, |this| this.invisible()), ) .child( - div().map(self.add_border( - ActiveBreakpointStripMode::HitCondition, - supports_hit_condition,window, cx - )) + div() + .map(self.add_focus_styles( + ActiveBreakpointStripMode::HitCondition, + supports_hit_condition, + window, + cx, + )) .child( IconButton::new( SharedString::from(format!("{id}-hit-condition-toggle")), IconName::ArrowDown10, ) - .icon_size(IconSize::XSmall) .style(style_for_toggle( ActiveBreakpointStripMode::HitCondition, has_hit_condition, )) + .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::Small) .icon_color(color_for_toggle(has_hit_condition)) + .when(has_hit_condition, |this| this.indicator(Indicator::dot().color(Color::Info))) .disabled(!supports_hit_condition) .toggle_state(self.is_toggled(ActiveBreakpointStripMode::HitCondition)) - .on_click(self.on_click_callback(ActiveBreakpointStripMode::HitCondition)).tooltip(|window, cx| Tooltip::with_meta("Set Hit Condition", None, "Set expression that controls how many hits of the breakpoint are ignored.", window, cx)) + .on_click(self.on_click_callback(ActiveBreakpointStripMode::HitCondition)) + .tooltip(|window, cx| { + Tooltip::with_meta( + "Set Hit Condition", + None, + "Set expression that controls how many hits of the breakpoint are ignored.", + window, + cx, + ) + }), ) .when(!has_hit_condition && !self.is_selected, |this| { this.invisible() diff --git a/crates/debugger_ui/src/session/running/console.rs b/crates/debugger_ui/src/session/running/console.rs index 1385bec54ef77222485cd642174d50aa60fa289a..a801cedd26924198d6a0639a3832ab0253b53adb 100644 --- a/crates/debugger_ui/src/session/running/console.rs +++ b/crates/debugger_ui/src/session/running/console.rs @@ -352,7 +352,7 @@ impl Console { .child( div() .px_1() - .child(Icon::new(IconName::ChevronDownSmall).size(IconSize::XSmall)), + .child(Icon::new(IconName::ChevronDown).size(IconSize::XSmall)), ), ) .when( @@ -365,9 +365,9 @@ impl Console { Some(ContextMenu::build(window, cx, |context_menu, _, _| { context_menu .when_some(keybinding_target.clone(), |el, keybinding_target| { - el.context(keybinding_target.clone()) + el.context(keybinding_target) }) - .action("Watch expression", WatchExpression.boxed_clone()) + .action("Watch Expression", WatchExpression.boxed_clone()) })) }) }, @@ -452,18 +452,22 @@ impl Render for Console { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let query_focus_handle = self.query_bar.focus_handle(cx); self.update_output(window, cx); + v_flex() .track_focus(&self.focus_handle) .key_context("DebugConsole") .on_action(cx.listener(Self::evaluate)) .on_action(cx.listener(Self::watch_expression)) .size_full() + .border_2() + .bg(cx.theme().colors().editor_background) .child(self.render_console(cx)) .when(self.is_running(cx), |this| { this.child(Divider::horizontal()).child( h_flex() .on_action(cx.listener(Self::previous_query)) .on_action(cx.listener(Self::next_query)) + .p_1() .gap_1() .bg(cx.theme().colors().editor_background) .child(self.render_query_bar(cx)) @@ -474,6 +478,9 @@ impl Render for Console { .on_click(move |_, window, cx| { window.dispatch_action(Box::new(Confirm), cx) }) + .layer(ui::ElevationIndex::ModalSurface) + .size(ui::ButtonSize::Compact) + .child(Label::new("Evaluate")) .tooltip({ let query_focus_handle = query_focus_handle.clone(); @@ -486,10 +493,7 @@ impl Render for Console { cx, ) } - }) - 
.layer(ui::ElevationIndex::ModalSurface) - .size(ui::ButtonSize::Compact) - .child(Label::new("Evaluate")), + }), self.render_submit_menu( ElementId::Name("split-button-right-confirm-button".into()), Some(query_focus_handle.clone()), @@ -499,7 +503,6 @@ impl Render for Console { )), ) }) - .border_2() } } @@ -608,17 +611,16 @@ impl ConsoleQueryBarCompletionProvider { for variable in console.variable_list.update(cx, |variable_list, cx| { variable_list.completion_variables(cx) }) { - if let Some(evaluate_name) = &variable.evaluate_name { - if variables + if let Some(evaluate_name) = &variable.evaluate_name + && variables .insert(evaluate_name.clone(), variable.value.clone()) .is_none() - { - string_matches.push(StringMatchCandidate { - id: 0, - string: evaluate_name.clone(), - char_bag: evaluate_name.chars().collect(), - }); - } + { + string_matches.push(StringMatchCandidate { + id: 0, + string: evaluate_name.clone(), + char_bag: evaluate_name.chars().collect(), + }); } if variables @@ -694,7 +696,7 @@ impl ConsoleQueryBarCompletionProvider { new_bytes: &[u8], snapshot: &TextBufferSnapshot, ) -> Range { - let buffer_offset = buffer_position.to_offset(&snapshot); + let buffer_offset = buffer_position.to_offset(snapshot); let buffer_bytes = &buffer_text.as_bytes()[0..buffer_offset]; let mut prefix_len = 0; @@ -974,7 +976,7 @@ mod tests { &cx.buffer_text(), snapshot.anchor_before(buffer_position), replacement.as_bytes(), - &snapshot, + snapshot, ); cx.update_editor(|editor, _, cx| { diff --git a/crates/debugger_ui/src/session/running/loaded_source_list.rs b/crates/debugger_ui/src/session/running/loaded_source_list.rs index 6b376bb892e1ea5aae64a1d5873b91487e65f3c2..921ebd8b5f5bdfe8a3c8a8f7bb1625bd1ffad7fb 100644 --- a/crates/debugger_ui/src/session/running/loaded_source_list.rs +++ b/crates/debugger_ui/src/session/running/loaded_source_list.rs @@ -57,7 +57,7 @@ impl LoadedSourceList { h_flex() .text_ui_xs(cx) .text_color(cx.theme().colors().text_muted) - .when_some(source.path.clone(), |this, path| this.child(path)), + .when_some(source.path, |this, path| this.child(path)), ) .into_any() } diff --git a/crates/debugger_ui/src/session/running/memory_view.rs b/crates/debugger_ui/src/session/running/memory_view.rs index 75b8938371555374fccdb5d77a6a8cc07ebae0e0..e7b7963d3f0775c18489ceb306badce1725a6268 100644 --- a/crates/debugger_ui/src/session/running/memory_view.rs +++ b/crates/debugger_ui/src/session/running/memory_view.rs @@ -18,10 +18,8 @@ use project::debugger::{MemoryCell, dap_command::DataBreakpointContext, session: use settings::Settings; use theme::ThemeSettings; use ui::{ - ActiveTheme, AnyElement, App, Color, Context, ContextMenu, Div, Divider, DropdownMenu, Element, - FluentBuilder, Icon, IconName, InteractiveElement, IntoElement, Label, LabelCommon, - ParentElement, Pixels, PopoverMenuHandle, Render, Scrollbar, ScrollbarState, SharedString, - StatefulInteractiveElement, Styled, TextSize, Tooltip, Window, div, h_flex, px, v_flex, + ContextMenu, Divider, DropdownMenu, FluentBuilder, IntoElement, PopoverMenuHandle, Render, + Scrollbar, ScrollbarState, StatefulInteractiveElement, Tooltip, prelude::*, }; use workspace::Workspace; @@ -264,7 +262,7 @@ impl MemoryView { cx: &mut Context, ) { use parse_int::parse; - let Ok(as_address) = parse::(&memory_reference) else { + let Ok(as_address) = parse::(memory_reference) else { return; }; let access_size = evaluate_name @@ -463,7 +461,7 @@ impl MemoryView { let data_breakpoint_info = this.data_breakpoint_info(context.clone(), None, cx); 
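// Per the DAP `dataBreakpointInfo` request, the returned `data_id` may be absent when the
// adapter cannot watch the given location; the spawned task below bails out early in that case.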
cx.spawn(async move |this, cx| { if let Some(info) = data_breakpoint_info.await { - let Some(data_id) = info.data_id.clone() else { + let Some(data_id) = info.data_id else { return; }; _ = this.update(cx, |this, cx| { @@ -933,7 +931,7 @@ impl Render for MemoryView { v_flex() .size_full() .on_drag_move(cx.listener(|this, evt, _, _| { - this.handle_memory_drag(&evt); + this.handle_memory_drag(evt); })) .child(self.render_memory(cx).size_full()) .children(self.open_context_menu.as_ref().map(|(menu, position, _)| { diff --git a/crates/debugger_ui/src/session/running/module_list.rs b/crates/debugger_ui/src/session/running/module_list.rs index 74a9fb457a57cf2e70af694ed586af2227ee4a0a..7743cfbdee7bf200ab25aabad4cfc455dc8b3484 100644 --- a/crates/debugger_ui/src/session/running/module_list.rs +++ b/crates/debugger_ui/src/session/running/module_list.rs @@ -157,7 +157,7 @@ impl ModuleList { h_flex() .text_ui_xs(cx) .text_color(cx.theme().colors().text_muted) - .when_some(module.path.clone(), |this, path| this.child(path)), + .when_some(module.path, |this, path| this.child(path)), ) .into_any() } @@ -223,7 +223,7 @@ impl ModuleList { fn select_next(&mut self, _: &menu::SelectNext, _window: &mut Window, cx: &mut Context) { let ix = match self.selected_ix { - _ if self.entries.len() == 0 => None, + _ if self.entries.is_empty() => None, None => Some(0), Some(ix) => { if ix == self.entries.len() - 1 { @@ -243,7 +243,7 @@ impl ModuleList { cx: &mut Context, ) { let ix = match self.selected_ix { - _ if self.entries.len() == 0 => None, + _ if self.entries.is_empty() => None, None => Some(self.entries.len() - 1), Some(ix) => { if ix == 0 { @@ -262,7 +262,7 @@ impl ModuleList { _window: &mut Window, cx: &mut Context, ) { - let ix = if self.entries.len() > 0 { + let ix = if !self.entries.is_empty() { Some(0) } else { None @@ -271,7 +271,7 @@ impl ModuleList { } fn select_last(&mut self, _: &menu::SelectLast, _window: &mut Window, cx: &mut Context) { - let ix = if self.entries.len() > 0 { + let ix = if !self.entries.is_empty() { Some(self.entries.len() - 1) } else { None diff --git a/crates/debugger_ui/src/session/running/stack_frame_list.rs b/crates/debugger_ui/src/session/running/stack_frame_list.rs index 2149502f4a5774478555ef66139f163a85a37b3d..a4ea4ab654929f00b05e9146bfd662aad2f8bd6d 100644 --- a/crates/debugger_ui/src/session/running/stack_frame_list.rs +++ b/crates/debugger_ui/src/session/running/stack_frame_list.rs @@ -126,7 +126,7 @@ impl StackFrameList { self.stack_frames(cx) .unwrap_or_default() .into_iter() - .map(|stack_frame| stack_frame.dap.clone()) + .map(|stack_frame| stack_frame.dap) .collect() } @@ -224,7 +224,7 @@ impl StackFrameList { let collapsed_entries = std::mem::take(&mut collapsed_entries); if !collapsed_entries.is_empty() { - entries.push(StackFrameEntry::Collapsed(collapsed_entries.clone())); + entries.push(StackFrameEntry::Collapsed(collapsed_entries)); } self.entries = entries; @@ -418,7 +418,7 @@ impl StackFrameList { let source = stack_frame.source.clone(); let is_selected_frame = Some(ix) == self.selected_ix; - let path = source.clone().and_then(|s| s.path.or(s.name)); + let path = source.and_then(|s| s.path.or(s.name)); let formatted_path = path.map(|path| format!("{}:{}", path, stack_frame.line,)); let formatted_path = formatted_path.map(|path| { Label::new(path) @@ -493,7 +493,7 @@ impl StackFrameList { .child( IconButton::new( ("restart-stack-frame", stack_frame.id), - IconName::DebugRestart, + IconName::RotateCcw, ) .icon_size(IconSize::Small) .on_click(cx.listener({ @@ 
-621,7 +621,7 @@ impl StackFrameList { fn select_next(&mut self, _: &menu::SelectNext, _window: &mut Window, cx: &mut Context) { let ix = match self.selected_ix { - _ if self.entries.len() == 0 => None, + _ if self.entries.is_empty() => None, None => Some(0), Some(ix) => { if ix == self.entries.len() - 1 { @@ -641,7 +641,7 @@ impl StackFrameList { cx: &mut Context, ) { let ix = match self.selected_ix { - _ if self.entries.len() == 0 => None, + _ if self.entries.is_empty() => None, None => Some(self.entries.len() - 1), Some(ix) => { if ix == 0 { @@ -660,7 +660,7 @@ impl StackFrameList { _window: &mut Window, cx: &mut Context, ) { - let ix = if self.entries.len() > 0 { + let ix = if !self.entries.is_empty() { Some(0) } else { None @@ -669,7 +669,7 @@ impl StackFrameList { } fn select_last(&mut self, _: &menu::SelectLast, _window: &mut Window, cx: &mut Context) { - let ix = if self.entries.len() > 0 { + let ix = if !self.entries.is_empty() { Some(self.entries.len() - 1) } else { None diff --git a/crates/debugger_ui/src/session/running/variable_list.rs b/crates/debugger_ui/src/session/running/variable_list.rs index efbc72e8cfc9099a5d699493898440d17fbf615b..b396f0921e5fdf58959e82db54bb8d558249891c 100644 --- a/crates/debugger_ui/src/session/running/variable_list.rs +++ b/crates/debugger_ui/src/session/running/variable_list.rs @@ -272,7 +272,7 @@ impl VariableList { let mut entries = vec![]; let scopes: Vec<_> = self.session.update(cx, |session, cx| { - session.scopes(stack_frame_id, cx).iter().cloned().collect() + session.scopes(stack_frame_id, cx).to_vec() }); let mut contains_local_scope = false; @@ -291,7 +291,7 @@ impl VariableList { } self.session.update(cx, |session, cx| { - session.variables(scope.variables_reference, cx).len() > 0 + !session.variables(scope.variables_reference, cx).is_empty() }) }) .map(|scope| { @@ -313,7 +313,7 @@ impl VariableList { watcher.variables_reference, watcher.variables_reference, EntryPath::for_watcher(watcher.expression.clone()), - DapEntry::Watcher(watcher.clone()), + DapEntry::Watcher(watcher), ) }) .collect::>(), @@ -947,7 +947,7 @@ impl VariableList { #[track_caller] #[cfg(test)] pub(crate) fn assert_visual_entries(&self, expected: Vec<&str>) { - const INDENT: &'static str = " "; + const INDENT: &str = " "; let entries = &self.entries; let mut visual_entries = Vec::with_capacity(entries.len()); @@ -997,7 +997,7 @@ impl VariableList { DapEntry::Watcher { .. 
} => continue, DapEntry::Variable(dap) => scopes[idx].1.push(dap.clone()), DapEntry::Scope(scope) => { - if scopes.len() > 0 { + if !scopes.is_empty() { idx += 1; } @@ -1289,7 +1289,7 @@ impl VariableList { }), ) .child(self.render_variable_value( - &entry, + entry, &variable_color, watcher.value.to_string(), cx, @@ -1301,8 +1301,6 @@ impl VariableList { IconName::Close, ) .on_click({ - let weak = weak.clone(); - let path = path.clone(); move |_, window, cx| { weak.update(cx, |variable_list, cx| { variable_list.selection = Some(path.clone()); @@ -1470,7 +1468,6 @@ impl VariableList { })) }) .on_secondary_mouse_down(cx.listener({ - let path = path.clone(); let entry = variable.clone(); move |this, event: &MouseDownEvent, window, cx| { this.selection = Some(path.clone()); @@ -1494,7 +1491,7 @@ impl VariableList { }), ) .child(self.render_variable_value( - &variable, + variable, &variable_color, dap.value.clone(), cx, diff --git a/crates/debugger_ui/src/tests/attach_modal.rs b/crates/debugger_ui/src/tests/attach_modal.rs index 906a7a0d4bd76f0451d6b5d5cfa5beff0136c613..80e2b73d5a100bbd21462f0ad80def1997e184de 100644 --- a/crates/debugger_ui/src/tests/attach_modal.rs +++ b/crates/debugger_ui/src/tests/attach_modal.rs @@ -139,7 +139,7 @@ async fn test_show_attach_modal_and_select_process( workspace .update(cx, |_, window, cx| { let names = - attach_modal.update(cx, |modal, cx| attach_modal::_process_names(&modal, cx)); + attach_modal.update(cx, |modal, cx| attach_modal::_process_names(modal, cx)); // Initially all processes are visible. assert_eq!(3, names.len()); attach_modal.update(cx, |this, cx| { @@ -154,7 +154,7 @@ async fn test_show_attach_modal_and_select_process( workspace .update(cx, |_, _, cx| { let names = - attach_modal.update(cx, |modal, cx| attach_modal::_process_names(&modal, cx)); + attach_modal.update(cx, |modal, cx| attach_modal::_process_names(modal, cx)); // Initially all processes are visible. 
assert_eq!(2, names.len()); }) diff --git a/crates/debugger_ui/src/tests/debugger_panel.rs b/crates/debugger_ui/src/tests/debugger_panel.rs index 6180831ea9dccfb3c1ee861daac099e54b2242c3..ab6d5cb9605d5d774187f836130fdae66a8d3404 100644 --- a/crates/debugger_ui/src/tests/debugger_panel.rs +++ b/crates/debugger_ui/src/tests/debugger_panel.rs @@ -1330,7 +1330,6 @@ async fn test_unsetting_breakpoints_on_clear_breakpoint_action( let called_set_breakpoints = Arc::new(AtomicBool::new(false)); client.on_request::({ - let called_set_breakpoints = called_set_breakpoints.clone(); move |_, args| { assert!( args.breakpoints.is_none_or(|bps| bps.is_empty()), @@ -1445,7 +1444,6 @@ async fn test_we_send_arguments_from_user_config( let launch_handler_called = Arc::new(AtomicBool::new(false)); start_debug_session_with(&workspace, cx, debug_definition.clone(), { - let debug_definition = debug_definition.clone(); let launch_handler_called = launch_handler_called.clone(); move |client| { @@ -1783,9 +1781,8 @@ async fn test_debug_adapters_shutdown_on_app_quit( let disconnect_request_received = Arc::new(AtomicBool::new(false)); let disconnect_clone = disconnect_request_received.clone(); - let disconnect_clone_for_handler = disconnect_clone.clone(); client.on_request::(move |_, _| { - disconnect_clone_for_handler.store(true, Ordering::SeqCst); + disconnect_clone.store(true, Ordering::SeqCst); Ok(()) }); diff --git a/crates/debugger_ui/src/tests/new_process_modal.rs b/crates/debugger_ui/src/tests/new_process_modal.rs index d6b0dfa00429f9487eafbe38dca5f072ed547779..bfc445cf67329b7190f8e5b8d353415fb53fcd74 100644 --- a/crates/debugger_ui/src/tests/new_process_modal.rs +++ b/crates/debugger_ui/src/tests/new_process_modal.rs @@ -106,9 +106,7 @@ async fn test_debug_session_substitutes_variables_and_relativizes_paths( ); let expected_other_field = if input_path.contains("$ZED_WORKTREE_ROOT") { - input_path - .replace("$ZED_WORKTREE_ROOT", &path!("/test/worktree/path")) - .to_owned() + input_path.replace("$ZED_WORKTREE_ROOT", path!("/test/worktree/path")) } else { input_path.to_string() }; diff --git a/crates/debugger_ui/src/tests/variable_list.rs b/crates/debugger_ui/src/tests/variable_list.rs index fbbd52964105659c2cae645cec494824069f5529..4cfdae093f6a1464b178c053e629a6ebe6d76d02 100644 --- a/crates/debugger_ui/src/tests/variable_list.rs +++ b/crates/debugger_ui/src/tests/variable_list.rs @@ -1445,11 +1445,8 @@ async fn test_variable_list_only_sends_requests_when_rendering( cx.run_until_parked(); - let running_state = active_debug_session_panel(workspace, cx).update_in(cx, |item, _, _| { - let state = item.running_state().clone(); - - state - }); + let running_state = active_debug_session_panel(workspace, cx) + .update_in(cx, |item, _, _| item.running_state().clone()); client .fake_event(dap::messages::Events::Stopped(dap::StoppedEvent { diff --git a/crates/diagnostics/src/diagnostic_renderer.rs b/crates/diagnostics/src/diagnostic_renderer.rs index ce7b253702a01e24e7f4a457ac418572e0fa2729..e9731f84ce258bbe55dc278eb4d2ddb0d6bab9ef 100644 --- a/crates/diagnostics/src/diagnostic_renderer.rs +++ b/crates/diagnostics/src/diagnostic_renderer.rs @@ -46,7 +46,7 @@ impl DiagnosticRenderer { markdown.push_str(" ("); } if let Some(source) = diagnostic.source.as_ref() { - markdown.push_str(&Markdown::escape(&source)); + markdown.push_str(&Markdown::escape(source)); } if diagnostic.source.is_some() && diagnostic.code.is_some() { markdown.push(' '); @@ -287,15 +287,13 @@ impl DiagnosticBlock { } } } - } else { - if let 
Some(diagnostic) = editor - .snapshot(window, cx) - .buffer_snapshot - .diagnostic_group(buffer_id, group_id) - .nth(ix) - { - Self::jump_to(editor, diagnostic.range, window, cx) - } + } else if let Some(diagnostic) = editor + .snapshot(window, cx) + .buffer_snapshot + .diagnostic_group(buffer_id, group_id) + .nth(ix) + { + Self::jump_to(editor, diagnostic.range, window, cx) }; } @@ -306,7 +304,7 @@ impl DiagnosticBlock { cx: &mut Context, ) { let snapshot = &editor.buffer().read(cx).snapshot(cx); - let range = range.start.to_offset(&snapshot)..range.end.to_offset(&snapshot); + let range = range.start.to_offset(snapshot)..range.end.to_offset(snapshot); editor.unfold_ranges(&[range.start..range.end], true, false, cx); editor.change_selections(Default::default(), window, cx, |s| { diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index e7660920da30ddcc088c2bbee6bfb1cf05d51d58..2e20118381de2e814ef2361280763eeab48606de 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -383,12 +383,10 @@ impl ProjectDiagnosticsEditor { } else { self.update_all_diagnostics(false, window, cx); } + } else if self.update_excerpts_task.is_some() { + self.update_excerpts_task = None; } else { - if self.update_excerpts_task.is_some() { - self.update_excerpts_task = None; - } else { - self.update_all_diagnostics(false, window, cx); - } + self.update_all_diagnostics(false, window, cx); } cx.notify(); } @@ -528,7 +526,7 @@ impl ProjectDiagnosticsEditor { lsp::DiagnosticSeverity::ERROR }; - cx.spawn_in(window, async move |this, mut cx| { + cx.spawn_in(window, async move |this, cx| { let diagnostics = buffer_snapshot .diagnostics_in_range::<_, text::Anchor>( Point::zero()..buffer_snapshot.max_point(), @@ -542,7 +540,7 @@ impl ProjectDiagnosticsEditor { return true; } this.diagnostics.insert(buffer_id, diagnostics.clone()); - return false; + false })?; if unchanged { return Ok(()); @@ -595,7 +593,7 @@ impl ProjectDiagnosticsEditor { b.initial_range.clone(), DEFAULT_MULTIBUFFER_CONTEXT, buffer_snapshot.clone(), - &mut cx, + cx, ) .await; let i = excerpt_ranges @@ -639,17 +637,15 @@ impl ProjectDiagnosticsEditor { #[cfg(test)] let cloned_blocks = blocks.clone(); - if was_empty { - if let Some(anchor_range) = anchor_ranges.first() { - let range_to_select = anchor_range.start..anchor_range.start; - this.editor.update(cx, |editor, cx| { - editor.change_selections(Default::default(), window, cx, |s| { - s.select_anchor_ranges([range_to_select]); - }) - }); - if this.focus_handle.is_focused(window) { - this.editor.read(cx).focus_handle(cx).focus(window); - } + if was_empty && let Some(anchor_range) = anchor_ranges.first() { + let range_to_select = anchor_range.start..anchor_range.start; + this.editor.update(cx, |editor, cx| { + editor.change_selections(Default::default(), window, cx, |s| { + s.select_anchor_ranges([range_to_select]); + }) + }); + if this.focus_handle.is_focused(window) { + this.editor.read(cx).focus_handle(cx).focus(window); } } @@ -980,18 +976,16 @@ async fn heuristic_syntactic_expand( // Remove blank lines from start and end if let Some(start_row) = (outline_range.start.row..outline_range.end.row) .find(|row| !snapshot.line_indent_for_row(*row).is_line_blank()) - { - if let Some(end_row) = (outline_range.start.row..outline_range.end.row + 1) + && let Some(end_row) = (outline_range.start.row..outline_range.end.row + 1) .rev() .find(|row| !snapshot.line_indent_for_row(*row).is_line_blank()) - { - let row_count = 
end_row.saturating_sub(start_row); - if row_count <= max_row_count { - return Some(RangeInclusive::new( - outline_range.start.row, - outline_range.end.row, - )); - } + { + let row_count = end_row.saturating_sub(start_row); + if row_count <= max_row_count { + return Some(RangeInclusive::new( + outline_range.start.row, + outline_range.end.row, + )); } } } diff --git a/crates/diagnostics/src/diagnostics_tests.rs b/crates/diagnostics/src/diagnostics_tests.rs index 8fb223b2cbfcc7db817059dd92bf1ff869846645..4a544f9ea718f0df037fb3012c48efec1c804b43 100644 --- a/crates/diagnostics/src/diagnostics_tests.rs +++ b/crates/diagnostics/src/diagnostics_tests.rs @@ -862,7 +862,7 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S 21..=50 => mutated_diagnostics.update_in(cx, |diagnostics, window, cx| { diagnostics.editor.update(cx, |editor, cx| { let snapshot = editor.snapshot(window, cx); - if snapshot.buffer_snapshot.len() > 0 { + if !snapshot.buffer_snapshot.is_empty() { let position = rng.gen_range(0..snapshot.buffer_snapshot.len()); let position = snapshot.buffer_snapshot.clip_offset(position, Bias::Left); log::info!( @@ -971,7 +971,7 @@ async fn active_diagnostics_dismiss_after_invalidation(cx: &mut TestAppContext) let mut cx = EditorTestContext::new(cx).await; let lsp_store = - cx.update_editor(|editor, _, cx| editor.project.as_ref().unwrap().read(cx).lsp_store()); + cx.update_editor(|editor, _, cx| editor.project().unwrap().read(cx).lsp_store()); cx.set_state(indoc! {" ˇfn func(abc def: i32) -> u32 { @@ -1065,7 +1065,7 @@ async fn cycle_through_same_place_diagnostics(cx: &mut TestAppContext) { let mut cx = EditorTestContext::new(cx).await; let lsp_store = - cx.update_editor(|editor, _, cx| editor.project.as_ref().unwrap().read(cx).lsp_store()); + cx.update_editor(|editor, _, cx| editor.project().unwrap().read(cx).lsp_store()); cx.set_state(indoc! {" ˇfn func(abc def: i32) -> u32 { @@ -1239,7 +1239,7 @@ async fn test_diagnostics_with_links(cx: &mut TestAppContext) { } "}); let lsp_store = - cx.update_editor(|editor, _, cx| editor.project.as_ref().unwrap().read(cx).lsp_store()); + cx.update_editor(|editor, _, cx| editor.project().unwrap().read(cx).lsp_store()); cx.update(|_, cx| { lsp_store.update(cx, |lsp_store, cx| { @@ -1293,7 +1293,7 @@ async fn test_hover_diagnostic_and_info_popovers(cx: &mut gpui::TestAppContext) fn «test»() { println!(); } "}); let lsp_store = - cx.update_editor(|editor, _, cx| editor.project.as_ref().unwrap().read(cx).lsp_store()); + cx.update_editor(|editor, _, cx| editor.project().unwrap().read(cx).lsp_store()); cx.update(|_, cx| { lsp_store.update(cx, |lsp_store, cx| { lsp_store.update_diagnostics( @@ -1450,7 +1450,7 @@ async fn go_to_diagnostic_with_severity(cx: &mut TestAppContext) { let mut cx = EditorTestContext::new(cx).await; let lsp_store = - cx.update_editor(|editor, _, cx| editor.project.as_ref().unwrap().read(cx).lsp_store()); + cx.update_editor(|editor, _, cx| editor.project().unwrap().read(cx).lsp_store()); cx.set_state(indoc! 
{"error warning info hiˇnt"}); diff --git a/crates/diagnostics/src/toolbar_controls.rs b/crates/diagnostics/src/toolbar_controls.rs index 9a7dcbe62fe302aa14bb9338bcab2301775a370c..e77b80115f2ffe6de512743d3eb00311052d7937 100644 --- a/crates/diagnostics/src/toolbar_controls.rs +++ b/crates/diagnostics/src/toolbar_controls.rs @@ -54,7 +54,7 @@ impl Render for ToolbarControls { .map(|div| { if is_updating { div.child( - IconButton::new("stop-updating", IconName::StopFilled) + IconButton::new("stop-updating", IconName::Stop) .icon_color(Color::Info) .shape(IconButtonShape::Square) .tooltip(Tooltip::for_action_title( @@ -73,7 +73,7 @@ impl Render for ToolbarControls { ) } else { div.child( - IconButton::new("refresh-diagnostics", IconName::Update) + IconButton::new("refresh-diagnostics", IconName::ArrowCircle) .icon_color(Color::Info) .shape(IconButtonShape::Square) .disabled(!has_stale_excerpts && !fetch_cargo_diagnostics) diff --git a/crates/docs_preprocessor/src/main.rs b/crates/docs_preprocessor/src/main.rs index 1448f4cb52369b5142856faabd67c0f9c3271220..c900eb692aee34b13f13f4fb67061b577b28be1d 100644 --- a/crates/docs_preprocessor/src/main.rs +++ b/crates/docs_preprocessor/src/main.rs @@ -8,7 +8,7 @@ use std::borrow::Cow; use std::collections::{HashMap, HashSet}; use std::io::{self, Read}; use std::process; -use std::sync::LazyLock; +use std::sync::{LazyLock, OnceLock}; use util::paths::PathExt; static KEYMAP_MACOS: LazyLock = LazyLock::new(|| { @@ -21,7 +21,7 @@ static KEYMAP_LINUX: LazyLock = LazyLock::new(|| { static ALL_ACTIONS: LazyLock> = LazyLock::new(dump_all_gpui_actions); -const FRONT_MATTER_COMMENT: &'static str = ""; +const FRONT_MATTER_COMMENT: &str = ""; fn main() -> Result<()> { zlog::init(); @@ -61,15 +61,13 @@ impl PreprocessorError { for alias in action.deprecated_aliases { if alias == &action_name { return PreprocessorError::DeprecatedActionUsed { - used: action_name.clone(), + used: action_name, should_be: action.name.to_string(), }; } } } - PreprocessorError::ActionNotFound { - action_name: action_name.to_string(), - } + PreprocessorError::ActionNotFound { action_name } } } @@ -101,12 +99,13 @@ fn handle_preprocessing() -> Result<()> { let mut errors = HashSet::::new(); handle_frontmatter(&mut book, &mut errors); + template_big_table_of_actions(&mut book); template_and_validate_keybindings(&mut book, &mut errors); template_and_validate_actions(&mut book, &mut errors); if !errors.is_empty() { - const ANSI_RED: &'static str = "\x1b[31m"; - const ANSI_RESET: &'static str = "\x1b[0m"; + const ANSI_RED: &str = "\x1b[31m"; + const ANSI_RESET: &str = "\x1b[0m"; for error in &errors { eprintln!("{ANSI_RED}ERROR{ANSI_RESET}: {}", error); } @@ -129,7 +128,7 @@ fn handle_frontmatter(book: &mut Book, errors: &mut HashSet) let Some((name, value)) = line.split_once(':') else { errors.insert(PreprocessorError::InvalidFrontmatterLine(format!( "{}: {}", - chapter_breadcrumbs(&chapter), + chapter_breadcrumbs(chapter), line ))); continue; @@ -143,11 +142,20 @@ fn handle_frontmatter(book: &mut Book, errors: &mut HashSet) &serde_json::to_string(&metadata).expect("Failed to serialize metadata"), ) }); - match new_content { - Cow::Owned(content) => { - chapter.content = content; - } - Cow::Borrowed(_) => {} + if let Cow::Owned(content) = new_content { + chapter.content = content; + } + }); +} + +fn template_big_table_of_actions(book: &mut Book) { + for_each_chapter_mut(book, |chapter| { + let needle = "{#ACTIONS_TABLE#}"; + if let Some(start) = chapter.content.rfind(needle) { + 
chapter.content.replace_range( + start..start + needle.len(), + &generate_big_table_of_actions(), + ); } }); } @@ -282,6 +290,7 @@ struct ActionDef { name: &'static str, human_name: String, deprecated_aliases: &'static [&'static str], + docs: Option<&'static str>, } fn dump_all_gpui_actions() -> Vec<ActionDef> { @@ -290,12 +299,13 @@ fn dump_all_gpui_actions() -> Vec<ActionDef> { name: action.name, human_name: command_palette::humanize_action_name(action.name), deprecated_aliases: action.deprecated_aliases, + docs: action.documentation, }) .collect::<Vec<_>>(); actions.sort_by_key(|a| a.name); - return actions; + actions } fn handle_postprocessing() -> Result<()> { @@ -388,7 +398,7 @@ fn handle_postprocessing() -> Result<()> { let meta_title = format!("{} | {}", page_title, meta_title); zlog::trace!(logger => "Updating {:?}", pretty_path(&file, &root_dir)); let contents = contents.replace("#description#", meta_description); - let contents = TITLE_REGEX + let contents = title_regex() .replace(&contents, |_: &regex::Captures| { format!("<title>{}</title>", meta_title) }) @@ -402,21 +412,75 @@ fn handle_postprocessing() -> Result<()> { path: &'a std::path::PathBuf, root: &'a std::path::PathBuf, ) -> &'a std::path::Path { - &path.strip_prefix(&root).unwrap_or(&path) + path.strip_prefix(&root).unwrap_or(path) } - const TITLE_REGEX: std::cell::LazyCell<Regex> = - std::cell::LazyCell::new(|| Regex::new(r"<title>\s*(.*?)\s*</title>").unwrap()); fn extract_title_from_page(contents: &str, pretty_path: &std::path::Path) -> String { - let title_tag_contents = &TITLE_REGEX - .captures(&contents) + let title_tag_contents = &title_regex() + .captures(contents) .with_context(|| format!("Failed to find title in {:?}", pretty_path)) .expect("Page has <title> element")[1]; - let title = title_tag_contents + + title_tag_contents .trim() .strip_suffix("- Zed") .unwrap_or(title_tag_contents) .trim() - .to_string(); - title + .to_string() } } + +fn title_regex() -> &'static Regex { + static TITLE_REGEX: OnceLock<Regex> = OnceLock::new(); + TITLE_REGEX.get_or_init(|| Regex::new(r"<title>\s*(.*?)\s*</title>").unwrap()) +} + +fn generate_big_table_of_actions() -> String { + let actions = &*ALL_ACTIONS; + let mut output = String::new(); + + let mut actions_sorted = actions.iter().collect::<Vec<_>>(); + actions_sorted.sort_by_key(|a| a.name); + + // Start the definition list + output.push_str("<dl>
\n"); + + for action in actions_sorted.into_iter() { + // Add the humanized action name as the term + output.push_str( + "<dt>
", + ); + output.push_str(&action.human_name); + output.push_str("</dt>
\n"); + + // Add the definition with keymap name and description + output.push_str("<dd>
\n"); + + // Add the description, escaping HTML if needed + if let Some(description) = action.docs { + output.push_str( + &description + .replace("&", "&amp;") + .replace("<", "&lt;") + .replace(">", "&gt;"), + ); + output.push_str("<br>
\n"); + } + output.push_str("Keymap Name: "); + output.push_str(action.name); + output.push_str("<br>
\n"); + if !action.deprecated_aliases.is_empty() { + output.push_str("Deprecated Aliases:"); + for alias in action.deprecated_aliases.iter() { + output.push_str("<code>"); + output.push_str(alias); + output.push_str("</code>, "); + } + } + output.push_str("\n</dd>
\n"); + } + + // Close the definition list + output.push_str("</dl>
\n"); + + output +} diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index c8502f75de5adac0a1bfdcb8cd8fe4444bb70f84..964f2029340f425546f8816f94a604cabe2aa294 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -34,7 +34,7 @@ pub enum DataCollectionState { impl DataCollectionState { pub fn is_supported(&self) -> bool { - !matches!(self, DataCollectionState::Unsupported { .. }) + !matches!(self, DataCollectionState::Unsupported) } pub fn is_enabled(&self) -> bool { diff --git a/crates/edit_prediction_button/src/edit_prediction_button.rs b/crates/edit_prediction_button/src/edit_prediction_button.rs index 3d3b43d71bc4a0914ed97dac24a278049f4c52f1..4f69af7ee414a664af623d1bf980520ade6a4a49 100644 --- a/crates/edit_prediction_button/src/edit_prediction_button.rs +++ b/crates/edit_prediction_button/src/edit_prediction_button.rs @@ -127,7 +127,7 @@ impl Render for EditPredictionButton { }), ); } - let this = cx.entity().clone(); + let this = cx.entity(); div().child( PopoverMenu::new("copilot") @@ -168,7 +168,7 @@ impl Render for EditPredictionButton { let account_status = agent.account_status.clone(); match account_status { AccountStatus::NeedsActivation { activate_url } => { - SupermavenButtonStatus::NeedsActivation(activate_url.clone()) + SupermavenButtonStatus::NeedsActivation(activate_url) } AccountStatus::Unknown => SupermavenButtonStatus::Initializing, AccountStatus::Ready => SupermavenButtonStatus::Ready, @@ -182,10 +182,10 @@ impl Render for EditPredictionButton { let icon = status.to_icon(); let tooltip_text = status.to_tooltip(); let has_menu = status.has_menu(); - let this = cx.entity().clone(); + let this = cx.entity(); let fs = self.fs.clone(); - return div().child( + div().child( PopoverMenu::new("supermaven") .menu(move |window, cx| match &status { SupermavenButtonStatus::NeedsActivation(activate_url) => { @@ -230,7 +230,7 @@ impl Render for EditPredictionButton { }, ) .with_handle(self.popover_menu_handle.clone()), - ); + ) } EditPredictionProvider::Zed => { @@ -331,7 +331,7 @@ impl Render for EditPredictionButton { }) }); - let this = cx.entity().clone(); + let this = cx.entity(); let mut popover_menu = PopoverMenu::new("zeta") .menu(move |window, cx| { @@ -343,7 +343,7 @@ impl Render for EditPredictionButton { let is_refreshing = self .edit_prediction_provider .as_ref() - .map_or(false, |provider| provider.is_refreshing(cx)); + .is_some_and(|provider| provider.is_refreshing(cx)); if is_refreshing { popover_menu = popover_menu.trigger( diff --git a/crates/editor/src/actions.rs b/crates/editor/src/actions.rs index 39433b3c279e101f47ad4b2eed4d180f82a38997..ce02c4d2bf39c6bc5513280a1d81b071a9e6cd6a 100644 --- a/crates/editor/src/actions.rs +++ b/crates/editor/src/actions.rs @@ -273,6 +273,16 @@ pub enum UuidVersion { V7, } +/// Splits selection into individual lines. +#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)] +#[action(namespace = editor)] +#[serde(deny_unknown_fields)] +pub struct SplitSelectionIntoLines { + /// Keep the text selected after splitting instead of collapsing to cursors. + #[serde(default)] + pub keep_selections: bool, +} + /// Goes to the next diagnostic in the file. #[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)] #[action(namespace = editor)] @@ -672,8 +682,6 @@ actions!( SortLinesCaseInsensitive, /// Sorts selected lines case-sensitively. 
SortLinesCaseSensitive, - /// Splits selection into individual lines. - SplitSelectionIntoLines, /// Stops the language server for the current file. StopLanguageServer, /// Switches between source and header files. diff --git a/crates/editor/src/clangd_ext.rs b/crates/editor/src/clangd_ext.rs index 3239fdc653e0e2acdbdaa3396e30c0546ef259cf..c78d4c83c01c49e6b1ff947d3cd53bc887424a16 100644 --- a/crates/editor/src/clangd_ext.rs +++ b/crates/editor/src/clangd_ext.rs @@ -13,7 +13,7 @@ use crate::{Editor, SwitchSourceHeader, element::register_action}; use project::lsp_store::clangd_ext::CLANGD_SERVER_NAME; fn is_c_language(language: &Language) -> bool { - return language.name() == "C++".into() || language.name() == "C".into(); + language.name() == "C++".into() || language.name() == "C".into() } pub fn switch_source_header( @@ -104,6 +104,6 @@ pub fn apply_related_actions(editor: &Entity, window: &mut Window, cx: & .filter_map(|buffer| buffer.read(cx).language()) .any(|language| is_c_language(language)) { - register_action(&editor, window, switch_source_header); + register_action(editor, window, switch_source_header); } } diff --git a/crates/editor/src/code_completion_tests.rs b/crates/editor/src/code_completion_tests.rs index fd8db29584d8eb6944ff674dd8bf5d860ce32428..a1d9f04a9c590ef1f20779bf19c2fe0be8905709 100644 --- a/crates/editor/src/code_completion_tests.rs +++ b/crates/editor/src/code_completion_tests.rs @@ -317,7 +317,7 @@ async fn filter_and_sort_matches( let candidates: Arc<[StringMatchCandidate]> = completions .iter() .enumerate() - .map(|(id, completion)| StringMatchCandidate::new(id, &completion.label.filter_text())) + .map(|(id, completion)| StringMatchCandidate::new(id, completion.label.filter_text())) .collect(); let cancel_flag = Arc::new(AtomicBool::new(false)); let background_executor = cx.executor(); @@ -331,5 +331,5 @@ async fn filter_and_sort_matches( background_executor, ) .await; - CompletionsMenu::sort_string_matches(matches, Some(query), snippet_sort_order, &completions) + CompletionsMenu::sort_string_matches(matches, Some(query), snippet_sort_order, completions) } diff --git a/crates/editor/src/code_context_menus.rs b/crates/editor/src/code_context_menus.rs index 4ae2a14ca730dafa7cfecd9e9b3bacbe3f7bc47b..96809d68777ca8d84623c308bb8b06eec493a5be 100644 --- a/crates/editor/src/code_context_menus.rs +++ b/crates/editor/src/code_context_menus.rs @@ -321,7 +321,7 @@ impl CompletionsMenu { let match_candidates = choices .iter() .enumerate() - .map(|(id, completion)| StringMatchCandidate::new(id, &completion)) + .map(|(id, completion)| StringMatchCandidate::new(id, completion)) .collect(); let entries = choices .iter() @@ -514,7 +514,7 @@ impl CompletionsMenu { // Expand the range to resolve more completions than are predicted to be visible, to reduce // jank on navigation. 
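        // A sketch of the intent, with assumed values: given 100 entries, a visible
        // entry_range of 20..30, and RESOLVE_BEFORE_ITEMS / RESOLVE_AFTER_ITEMS of 4,
        // the indices handed to resolution would cover roughly 16..34, wrapping around
        // the ends of the entry list when the visible range touches the first or last index.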
let entry_indices = util::expanded_and_wrapped_usize_range( - entry_range.clone(), + entry_range, RESOLVE_BEFORE_ITEMS, RESOLVE_AFTER_ITEMS, entries.len(), @@ -1111,10 +1111,8 @@ impl CompletionsMenu { let query_start_doesnt_match_split_words = query_start_lower .map(|query_char| { !split_words(&string_match.string).any(|word| { - word.chars() - .next() - .and_then(|c| c.to_lowercase().next()) - .map_or(false, |word_char| word_char == query_char) + word.chars().next().and_then(|c| c.to_lowercase().next()) + == Some(query_char) }) }) .unwrap_or(false); diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index a16e516a70c9638965585cc5d6a23d8a9f67b639..c16e4a6ddbb971b44d71421d6ad868e6423eb035 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -969,13 +969,13 @@ impl DisplaySnapshot { if let Some(chunk_highlight) = chunk.highlight_style { // For color inlays, blend the color with the editor background let mut processed_highlight = chunk_highlight; - if chunk.is_inlay { - if let Some(inlay_color) = chunk_highlight.color { - // Only blend if the color has transparency (alpha < 1.0) - if inlay_color.a < 1.0 { - let blended_color = editor_style.background.blend(inlay_color); - processed_highlight.color = Some(blended_color); - } + if chunk.is_inlay + && let Some(inlay_color) = chunk_highlight.color + { + // Only blend if the color has transparency (alpha < 1.0) + if inlay_color.a < 1.0 { + let blended_color = editor_style.background.blend(inlay_color); + processed_highlight.color = Some(blended_color); } } @@ -991,7 +991,7 @@ impl DisplaySnapshot { if let Some(severity) = chunk.diagnostic_severity.filter(|severity| { self.diagnostics_max_severity .into_lsp() - .map_or(false, |max_severity| severity <= &max_severity) + .is_some_and(|max_severity| severity <= &max_severity) }) { if chunk.is_unnecessary { diagnostic_highlight.fade_out = Some(editor_style.unnecessary_code_fade); @@ -2351,11 +2351,12 @@ pub mod tests { .highlight_style .and_then(|style| style.color) .map_or(black, |color| color.to_rgb()); - if let Some((last_chunk, last_severity, last_color)) = chunks.last_mut() { - if *last_severity == chunk.diagnostic_severity && *last_color == color { - last_chunk.push_str(chunk.text); - continue; - } + if let Some((last_chunk, last_severity, last_color)) = chunks.last_mut() + && *last_severity == chunk.diagnostic_severity + && *last_color == color + { + last_chunk.push_str(chunk.text); + continue; } chunks.push((chunk.text.to_string(), chunk.diagnostic_severity, color)); @@ -2901,11 +2902,12 @@ pub mod tests { .syntax_highlight_id .and_then(|id| id.style(theme)?.color); let highlight_color = chunk.highlight_style.and_then(|style| style.color); - if let Some((last_chunk, last_syntax_color, last_highlight_color)) = chunks.last_mut() { - if syntax_color == *last_syntax_color && highlight_color == *last_highlight_color { - last_chunk.push_str(chunk.text); - continue; - } + if let Some((last_chunk, last_syntax_color, last_highlight_color)) = chunks.last_mut() + && syntax_color == *last_syntax_color + && highlight_color == *last_highlight_color + { + last_chunk.push_str(chunk.text); + continue; } chunks.push((chunk.text.to_string(), syntax_color, highlight_color)); } diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index e25c02432d10c16969c963455188474e10ed04a6..b073fe7be75c82754de6ca7773b68073b213c49c 100644 --- a/crates/editor/src/display_map/block_map.rs +++ 
b/crates/editor/src/display_map/block_map.rs @@ -290,7 +290,10 @@ pub enum Block { ExcerptBoundary { excerpt: ExcerptInfo, height: u32, - starts_new_buffer: bool, + }, + BufferHeader { + excerpt: ExcerptInfo, + height: u32, }, } @@ -303,27 +306,37 @@ impl Block { .. } => BlockId::ExcerptBoundary(next_excerpt.id), Block::FoldedBuffer { first_excerpt, .. } => BlockId::FoldedBuffer(first_excerpt.id), + Block::BufferHeader { + excerpt: next_excerpt, + .. + } => BlockId::ExcerptBoundary(next_excerpt.id), } } pub fn has_height(&self) -> bool { match self { Block::Custom(block) => block.height.is_some(), - Block::ExcerptBoundary { .. } | Block::FoldedBuffer { .. } => true, + Block::ExcerptBoundary { .. } + | Block::FoldedBuffer { .. } + | Block::BufferHeader { .. } => true, } } pub fn height(&self) -> u32 { match self { Block::Custom(block) => block.height.unwrap_or(0), - Block::ExcerptBoundary { height, .. } | Block::FoldedBuffer { height, .. } => *height, + Block::ExcerptBoundary { height, .. } + | Block::FoldedBuffer { height, .. } + | Block::BufferHeader { height, .. } => *height, } } pub fn style(&self) -> BlockStyle { match self { Block::Custom(block) => block.style, - Block::ExcerptBoundary { .. } | Block::FoldedBuffer { .. } => BlockStyle::Sticky, + Block::ExcerptBoundary { .. } + | Block::FoldedBuffer { .. } + | Block::BufferHeader { .. } => BlockStyle::Sticky, } } @@ -332,6 +345,7 @@ impl Block { Block::Custom(block) => matches!(block.placement, BlockPlacement::Above(_)), Block::FoldedBuffer { .. } => false, Block::ExcerptBoundary { .. } => true, + Block::BufferHeader { .. } => true, } } @@ -340,6 +354,7 @@ impl Block { Block::Custom(block) => matches!(block.placement, BlockPlacement::Near(_)), Block::FoldedBuffer { .. } => false, Block::ExcerptBoundary { .. } => false, + Block::BufferHeader { .. } => false, } } @@ -351,6 +366,7 @@ impl Block { ), Block::FoldedBuffer { .. } => false, Block::ExcerptBoundary { .. } => false, + Block::BufferHeader { .. } => false, } } @@ -359,6 +375,7 @@ impl Block { Block::Custom(block) => matches!(block.placement, BlockPlacement::Replace(_)), Block::FoldedBuffer { .. } => true, Block::ExcerptBoundary { .. } => false, + Block::BufferHeader { .. } => false, } } @@ -367,6 +384,7 @@ impl Block { Block::Custom(_) => false, Block::FoldedBuffer { .. } => true, Block::ExcerptBoundary { .. } => true, + Block::BufferHeader { .. } => true, } } @@ -374,9 +392,8 @@ impl Block { match self { Block::Custom(_) => false, Block::FoldedBuffer { .. } => true, - Block::ExcerptBoundary { - starts_new_buffer, .. - } => *starts_new_buffer, + Block::ExcerptBoundary { .. } => false, + Block::BufferHeader { .. } => true, } } } @@ -393,14 +410,14 @@ impl Debug for Block { .field("first_excerpt", &first_excerpt) .field("height", height) .finish(), - Self::ExcerptBoundary { - starts_new_buffer, - excerpt, - height, - } => f + Self::ExcerptBoundary { excerpt, height } => f .debug_struct("ExcerptBoundary") .field("excerpt", excerpt) - .field("starts_new_buffer", starts_new_buffer) + .field("height", height) + .finish(), + Self::BufferHeader { excerpt, height } => f + .debug_struct("BufferHeader") + .field("excerpt", excerpt) .field("height", height) .finish(), } @@ -525,26 +542,22 @@ impl BlockMap { // * Below blocks that end at the start of the edit // However, if we hit a replace block that ends at the start of the edit we want to reconstruct it. 
new_transforms.append(cursor.slice(&old_start, Bias::Left), &()); - if let Some(transform) = cursor.item() { - if transform.summary.input_rows > 0 - && cursor.end() == old_start - && transform - .block - .as_ref() - .map_or(true, |b| !b.is_replacement()) - { - // Preserve the transform (push and next) - new_transforms.push(transform.clone(), &()); - cursor.next(); + if let Some(transform) = cursor.item() + && transform.summary.input_rows > 0 + && cursor.end() == old_start + && transform.block.as_ref().is_none_or(|b| !b.is_replacement()) + { + // Preserve the transform (push and next) + new_transforms.push(transform.clone(), &()); + cursor.next(); - // Preserve below blocks at end of edit - while let Some(transform) = cursor.item() { - if transform.block.as_ref().map_or(false, |b| b.place_below()) { - new_transforms.push(transform.clone(), &()); - cursor.next(); - } else { - break; - } + // Preserve below blocks at end of edit + while let Some(transform) = cursor.item() { + if transform.block.as_ref().is_some_and(|b| b.place_below()) { + new_transforms.push(transform.clone(), &()); + cursor.next(); + } else { + break; } } } @@ -607,7 +620,7 @@ impl BlockMap { // Discard below blocks at the end of the edit. They'll be reconstructed. while let Some(transform) = cursor.item() { - if transform.block.as_ref().map_or(false, |b| b.place_below()) { + if transform.block.as_ref().is_some_and(|b| b.place_below()) { cursor.next(); } else { break; @@ -657,22 +670,20 @@ impl BlockMap { .iter() .filter_map(|block| { let placement = block.placement.to_wrap_row(wrap_snapshot)?; - if let BlockPlacement::Above(row) = placement { - if row < new_start { - return None; - } + if let BlockPlacement::Above(row) = placement + && row < new_start + { + return None; } Some((placement, Block::Custom(block.clone()))) }), ); - if buffer.show_headers() { - blocks_in_edit.extend(self.header_and_footer_blocks( - buffer, - (start_bound, end_bound), - wrap_snapshot, - )); - } + blocks_in_edit.extend(self.header_and_footer_blocks( + buffer, + (start_bound, end_bound), + wrap_snapshot, + )); BlockMap::sort_blocks(&mut blocks_in_edit); @@ -775,7 +786,7 @@ impl BlockMap { if self.buffers_with_disabled_headers.contains(&new_buffer_id) { continue; } - if self.folded_buffers.contains(&new_buffer_id) { + if self.folded_buffers.contains(&new_buffer_id) && buffer.show_headers() { let mut last_excerpt_end_row = first_excerpt.end_row; while let Some(next_boundary) = boundaries.peek() { @@ -808,20 +819,24 @@ impl BlockMap { } } - if new_buffer_id.is_some() { + let starts_new_buffer = new_buffer_id.is_some(); + let block = if starts_new_buffer && buffer.show_headers() { height += self.buffer_header_height; - } else { + Block::BufferHeader { + excerpt: excerpt_boundary.next, + height, + } + } else if excerpt_boundary.prev.is_some() { height += self.excerpt_header_height; - } - - return Some(( - BlockPlacement::Above(WrapRow(wrap_row)), Block::ExcerptBoundary { excerpt: excerpt_boundary.next, height, - starts_new_buffer: new_buffer_id.is_some(), - }, - )); + } + } else { + continue; + }; + + return Some((BlockPlacement::Above(WrapRow(wrap_row)), block)); } }) } @@ -846,13 +861,25 @@ impl BlockMap { ( Block::ExcerptBoundary { excerpt: excerpt_a, .. + } + | Block::BufferHeader { + excerpt: excerpt_a, .. }, Block::ExcerptBoundary { excerpt: excerpt_b, .. + } + | Block::BufferHeader { + excerpt: excerpt_b, .. }, ) => Some(excerpt_a.id).cmp(&Some(excerpt_b.id)), - (Block::ExcerptBoundary { .. 
}, Block::Custom(_)) => Ordering::Less, - (Block::Custom(_), Block::ExcerptBoundary { .. }) => Ordering::Greater, + ( + Block::ExcerptBoundary { .. } | Block::BufferHeader { .. }, + Block::Custom(_), + ) => Ordering::Less, + ( + Block::Custom(_), + Block::ExcerptBoundary { .. } | Block::BufferHeader { .. }, + ) => Ordering::Greater, (Block::Custom(block_a), Block::Custom(block_b)) => block_a .priority .cmp(&block_b.priority) @@ -977,10 +1004,10 @@ impl BlockMapReader<'_> { break; } - if let Some(BlockId::Custom(id)) = transform.block.as_ref().map(|block| block.id()) { - if id == block_id { - return Some(cursor.start().1); - } + if let Some(BlockId::Custom(id)) = transform.block.as_ref().map(|block| block.id()) + && id == block_id + { + return Some(cursor.start().1); } cursor.next(); } @@ -1299,14 +1326,14 @@ impl BlockSnapshot { let mut input_start = transform_input_start; let mut input_end = transform_input_start; - if let Some(transform) = cursor.item() { - if transform.block.is_none() { - input_start += rows.start - transform_output_start; - input_end += cmp::min( - rows.end - transform_output_start, - transform.summary.input_rows, - ); - } + if let Some(transform) = cursor.item() + && transform.block.is_none() + { + input_start += rows.start - transform_output_start; + input_end += cmp::min( + rows.end - transform_output_start, + transform.summary.input_rows, + ); } BlockChunks { @@ -1329,7 +1356,7 @@ impl BlockSnapshot { let Dimensions(output_start, input_start, _) = cursor.start(); let overshoot = if cursor .item() - .map_or(false, |transform| transform.block.is_none()) + .is_some_and(|transform| transform.block.is_none()) { start_row.0 - output_start.0 } else { @@ -1359,7 +1386,7 @@ impl BlockSnapshot { && transform .block .as_ref() - .map_or(false, |block| block.height() > 0)) + .is_some_and(|block| block.height() > 0)) { break; } @@ -1381,7 +1408,9 @@ impl BlockSnapshot { while let Some(transform) = cursor.item() { match &transform.block { - Some(Block::ExcerptBoundary { excerpt, .. }) => { + Some( + Block::ExcerptBoundary { excerpt, .. } | Block::BufferHeader { excerpt, .. 
}, + ) => { return Some(StickyHeaderExcerpt { excerpt }); } Some(block) if block.is_buffer_header() => return None, @@ -1472,18 +1501,18 @@ impl BlockSnapshot { longest_row_chars = summary.longest_row_chars; } - if let Some(transform) = cursor.item() { - if transform.block.is_none() { - let Dimensions(output_start, input_start, _) = cursor.start(); - let overshoot = range.end.0 - output_start.0; - let wrap_start_row = input_start.0; - let wrap_end_row = input_start.0 + overshoot; - let summary = self - .wrap_snapshot - .text_summary_for_range(wrap_start_row..wrap_end_row); - if summary.longest_row_chars > longest_row_chars { - longest_row = BlockRow(output_start.0 + summary.longest_row); - } + if let Some(transform) = cursor.item() + && transform.block.is_none() + { + let Dimensions(output_start, input_start, _) = cursor.start(); + let overshoot = range.end.0 - output_start.0; + let wrap_start_row = input_start.0; + let wrap_end_row = input_start.0 + overshoot; + let summary = self + .wrap_snapshot + .text_summary_for_range(wrap_start_row..wrap_end_row); + if summary.longest_row_chars > longest_row_chars { + longest_row = BlockRow(output_start.0 + summary.longest_row); } } } @@ -1512,7 +1541,7 @@ impl BlockSnapshot { pub(super) fn is_block_line(&self, row: BlockRow) -> bool { let mut cursor = self.transforms.cursor::>(&()); cursor.seek(&row, Bias::Right); - cursor.item().map_or(false, |t| t.block.is_some()) + cursor.item().is_some_and(|t| t.block.is_some()) } pub(super) fn is_folded_buffer_header(&self, row: BlockRow) -> bool { @@ -1530,11 +1559,11 @@ impl BlockSnapshot { .make_wrap_point(Point::new(row.0, 0), Bias::Left); let mut cursor = self.transforms.cursor::>(&()); cursor.seek(&WrapRow(wrap_point.row()), Bias::Right); - cursor.item().map_or(false, |transform| { + cursor.item().is_some_and(|transform| { transform .block .as_ref() - .map_or(false, |block| block.is_replacement()) + .is_some_and(|block| block.is_replacement()) }) } @@ -1557,12 +1586,11 @@ impl BlockSnapshot { match transform.block.as_ref() { Some(block) => { - if block.is_replacement() { - if ((bias == Bias::Left || search_left) && output_start <= point.0) - || (!search_left && output_start >= point.0) - { - return BlockPoint(output_start); - } + if block.is_replacement() + && (((bias == Bias::Left || search_left) && output_start <= point.0) + || (!search_left && output_start >= point.0)) + { + return BlockPoint(output_start); } } None => { @@ -1655,7 +1683,7 @@ impl BlockChunks<'_> { if transform .block .as_ref() - .map_or(false, |block| block.height() == 0) + .is_some_and(|block| block.height() == 0) { self.transforms.next(); } else { @@ -1666,7 +1694,7 @@ impl BlockChunks<'_> { if self .transforms .item() - .map_or(false, |transform| transform.block.is_none()) + .is_some_and(|transform| transform.block.is_none()) { let start_input_row = self.transforms.start().1.0; let start_output_row = self.transforms.start().0.0; @@ -1776,7 +1804,7 @@ impl Iterator for BlockRows<'_> { if transform .block .as_ref() - .map_or(false, |block| block.height() == 0) + .is_some_and(|block| block.height() == 0) { self.transforms.next(); } else { @@ -1788,7 +1816,7 @@ impl Iterator for BlockRows<'_> { if transform .block .as_ref() - .map_or(true, |block| block.is_replacement()) + .is_none_or(|block| block.is_replacement()) { self.input_rows.seek(self.transforms.start().1.0); } @@ -2161,7 +2189,7 @@ mod tests { } let multi_buffer_snapshot = multi_buffer.read(cx).snapshot(cx); - let (_, inlay_snapshot) = 
InlayMap::new(multi_buffer_snapshot.clone()); + let (_, inlay_snapshot) = InlayMap::new(multi_buffer_snapshot); let (_, fold_snapshot) = FoldMap::new(inlay_snapshot); let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap()); let (_, wraps_snapshot) = WrapMap::new(tab_snapshot, font, font_size, Some(wrap_width), cx); @@ -2280,7 +2308,7 @@ mod tests { new_heights.insert(block_ids[0], 3); block_map_writer.resize(new_heights); - let snapshot = block_map.read(wraps_snapshot.clone(), Default::default()); + let snapshot = block_map.read(wraps_snapshot, Default::default()); // Same height as before, should remain the same assert_eq!(snapshot.text(), "aaa\n\n\n\n\n\nbbb\nccc\nddd\n\n\n"); } @@ -2290,8 +2318,6 @@ mod tests { fn test_blocks_on_wrapped_lines(cx: &mut gpui::TestAppContext) { cx.update(init_test); - let _font_id = cx.text_system().font_id(&font("Helvetica")).unwrap(); - let text = "one two three\nfour five six\nseven eight"; let buffer = cx.update(|cx| MultiBuffer::build_simple(text, cx)); @@ -2367,16 +2393,14 @@ mod tests { buffer.edit([(Point::new(2, 0)..Point::new(3, 0), "")], None, cx); buffer.snapshot(cx) }); - let (inlay_snapshot, inlay_edits) = inlay_map.sync( - buffer_snapshot.clone(), - buffer_subscription.consume().into_inner(), - ); + let (inlay_snapshot, inlay_edits) = + inlay_map.sync(buffer_snapshot, buffer_subscription.consume().into_inner()); let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits); let (tab_snapshot, tab_edits) = tab_map.sync(fold_snapshot, fold_edits, tab_size); let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| { wrap_map.sync(tab_snapshot, tab_edits, cx) }); - let blocks_snapshot = block_map.read(wraps_snapshot.clone(), wrap_edits); + let blocks_snapshot = block_map.read(wraps_snapshot, wrap_edits); assert_eq!(blocks_snapshot.text(), "line1\n\n\n\n\nline5"); let buffer_snapshot = buffer.update(cx, |buffer, cx| { @@ -2461,7 +2485,7 @@ mod tests { // Removing the replace block shows all the hidden blocks again. 
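The hunks that follow, like many others throughout this diff, flatten nested if let / if conditionals into let-chains. A standalone before/after sketch of that pattern (illustrative names only; the chained form assumes the Rust 2024 edition that the rest of the diff already relies on):

    // Before: nesting grows with every additional condition.
    fn blend_nested(is_inlay: bool, color: Option<f32>) -> Option<f32> {
        if is_inlay {
            if let Some(alpha) = color {
                if alpha < 1.0 {
                    return Some(alpha * 0.5);
                }
            }
        }
        None
    }

    // After: one let-chain keeps the happy path at a single indent level.
    fn blend_chained(is_inlay: bool, color: Option<f32>) -> Option<f32> {
        if is_inlay
            && let Some(alpha) = color
            && alpha < 1.0
        {
            return Some(alpha * 0.5);
        }
        None
    }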
let mut writer = block_map.write(wraps_snapshot.clone(), Default::default()); writer.remove(HashSet::from_iter([replace_block_id])); - let blocks_snapshot = block_map.read(wraps_snapshot.clone(), Default::default()); + let blocks_snapshot = block_map.read(wraps_snapshot, Default::default()); assert_eq!( blocks_snapshot.text(), "\nline1\n\nline2\n\n\nline 2.1\nline2.2\nline 2.3\nline 2.4\n\nline4\n\nline5" @@ -2800,7 +2824,7 @@ mod tests { buffer.read_with(cx, |buffer, cx| { writer.fold_buffers([buffer_id_3], buffer, cx); }); - let blocks_snapshot = block_map.read(wrap_snapshot.clone(), Patch::default()); + let blocks_snapshot = block_map.read(wrap_snapshot, Patch::default()); let blocks = blocks_snapshot .blocks_in_range(0..u32::MAX) .collect::>(); @@ -2853,7 +2877,7 @@ mod tests { assert_eq!(buffer_ids.len(), 1); let buffer_id = buffer_ids[0]; - let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot); let (_, fold_snapshot) = FoldMap::new(inlay_snapshot); let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap()); let (_, wrap_snapshot) = @@ -2867,7 +2891,7 @@ mod tests { buffer.read_with(cx, |buffer, cx| { writer.fold_buffers([buffer_id], buffer, cx); }); - let blocks_snapshot = block_map.read(wrap_snapshot.clone(), Patch::default()); + let blocks_snapshot = block_map.read(wrap_snapshot, Patch::default()); let blocks = blocks_snapshot .blocks_in_range(0..u32::MAX) .collect::>(); @@ -2875,12 +2899,7 @@ mod tests { 1, blocks .iter() - .filter(|(_, block)| { - match block { - Block::FoldedBuffer { .. } => true, - _ => false, - } - }) + .filter(|(_, block)| { matches!(block, Block::FoldedBuffer { .. }) }) .count(), "Should have one folded block, producing a header of the second buffer" ); @@ -3197,9 +3216,9 @@ mod tests { // so we special case row 0 to assume a leading '\n'. // // Linehood is the birthright of strings. - let mut input_text_lines = input_text.split('\n').enumerate().peekable(); + let input_text_lines = input_text.split('\n').enumerate().peekable(); let mut block_row = 0; - while let Some((wrap_row, input_line)) = input_text_lines.next() { + for (wrap_row, input_line) in input_text_lines { let wrap_row = wrap_row as u32; let multibuffer_row = wraps_snapshot .to_point(WrapPoint::new(wrap_row, 0), Bias::Left) @@ -3230,34 +3249,32 @@ mod tests { let mut is_in_replace_block = false; if let Some((BlockPlacement::Replace(replace_range), block)) = sorted_blocks_iter.peek() + && wrap_row >= replace_range.start().0 { - if wrap_row >= replace_range.start().0 { - is_in_replace_block = true; + is_in_replace_block = true; - if wrap_row == replace_range.start().0 { - if matches!(block, Block::FoldedBuffer { .. }) { - expected_buffer_rows.push(None); - } else { - expected_buffer_rows - .push(input_buffer_rows[multibuffer_row as usize]); - } + if wrap_row == replace_range.start().0 { + if matches!(block, Block::FoldedBuffer { .. 
}) { + expected_buffer_rows.push(None); + } else { + expected_buffer_rows.push(input_buffer_rows[multibuffer_row as usize]); } + } - if wrap_row == replace_range.end().0 { - expected_block_positions.push((block_row, block.id())); - let text = "\n".repeat((block.height() - 1) as usize); - if block_row > 0 { - expected_text.push('\n'); - } - expected_text.push_str(&text); - - for _ in 1..block.height() { - expected_buffer_rows.push(None); - } - block_row += block.height(); + if wrap_row == replace_range.end().0 { + expected_block_positions.push((block_row, block.id())); + let text = "\n".repeat((block.height() - 1) as usize); + if block_row > 0 { + expected_text.push('\n'); + } + expected_text.push_str(&text); - sorted_blocks_iter.next(); + for _ in 1..block.height() { + expected_buffer_rows.push(None); } + block_row += block.height(); + + sorted_blocks_iter.next(); } } @@ -3541,7 +3558,7 @@ mod tests { ..buffer_snapshot.anchor_after(Point::new(1, 0))], false, ); - let blocks_snapshot = block_map.read(wraps_snapshot.clone(), Default::default()); + let blocks_snapshot = block_map.read(wraps_snapshot, Default::default()); assert_eq!(blocks_snapshot.text(), "abc\n\ndef\nghi\njkl\nmno"); } diff --git a/crates/editor/src/display_map/custom_highlights.rs b/crates/editor/src/display_map/custom_highlights.rs index ae69e9cf8c710acecc840ef14082c8f9d91d7c03..f3737ea4b7cc3f504b6288a2e4241977c8ffd20e 100644 --- a/crates/editor/src/display_map/custom_highlights.rs +++ b/crates/editor/src/display_map/custom_highlights.rs @@ -77,7 +77,7 @@ fn create_highlight_endpoints( let ranges = &text_highlights.1; let start_ix = match ranges.binary_search_by(|probe| { - let cmp = probe.end.cmp(&start, &buffer); + let cmp = probe.end.cmp(&start, buffer); if cmp.is_gt() { cmp::Ordering::Greater } else { @@ -88,18 +88,18 @@ fn create_highlight_endpoints( }; for range in &ranges[start_ix..] 
{ - if range.start.cmp(&end, &buffer).is_ge() { + if range.start.cmp(&end, buffer).is_ge() { break; } highlight_endpoints.push(HighlightEndpoint { - offset: range.start.to_offset(&buffer), + offset: range.start.to_offset(buffer), is_start: true, tag, style, }); highlight_endpoints.push(HighlightEndpoint { - offset: range.end.to_offset(&buffer), + offset: range.end.to_offset(buffer), is_start: false, tag, style, diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index c4e53a0f4361d83429158f106bd81326c8ddb573..42f46fb74969301007d19032f1b96377d141a724 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -289,25 +289,25 @@ impl FoldMapWriter<'_> { let ChunkRendererId::Fold(id) = id else { continue; }; - if let Some(metadata) = self.0.snapshot.fold_metadata_by_id.get(&id).cloned() { - if Some(new_width) != metadata.width { - let buffer_start = metadata.range.start.to_offset(buffer); - let buffer_end = metadata.range.end.to_offset(buffer); - let inlay_range = inlay_snapshot.to_inlay_offset(buffer_start) - ..inlay_snapshot.to_inlay_offset(buffer_end); - edits.push(InlayEdit { - old: inlay_range.clone(), - new: inlay_range.clone(), - }); + if let Some(metadata) = self.0.snapshot.fold_metadata_by_id.get(&id).cloned() + && Some(new_width) != metadata.width + { + let buffer_start = metadata.range.start.to_offset(buffer); + let buffer_end = metadata.range.end.to_offset(buffer); + let inlay_range = inlay_snapshot.to_inlay_offset(buffer_start) + ..inlay_snapshot.to_inlay_offset(buffer_end); + edits.push(InlayEdit { + old: inlay_range.clone(), + new: inlay_range.clone(), + }); - self.0.snapshot.fold_metadata_by_id.insert( - id, - FoldMetadata { - range: metadata.range, - width: Some(new_width), - }, - ); - } + self.0.snapshot.fold_metadata_by_id.insert( + id, + FoldMetadata { + range: metadata.range, + width: Some(new_width), + }, + ); } } @@ -417,18 +417,18 @@ impl FoldMap { cursor.seek(&InlayOffset(0), Bias::Right); while let Some(mut edit) = inlay_edits_iter.next() { - if let Some(item) = cursor.item() { - if !item.is_fold() { - new_transforms.update_last( - |transform| { - if !transform.is_fold() { - transform.summary.add_summary(&item.summary, &()); - cursor.next(); - } - }, - &(), - ); - } + if let Some(item) = cursor.item() + && !item.is_fold() + { + new_transforms.update_last( + |transform| { + if !transform.is_fold() { + transform.summary.add_summary(&item.summary, &()); + cursor.next(); + } + }, + &(), + ); } new_transforms.append(cursor.slice(&edit.old.start, Bias::Left), &()); edit.new.start -= edit.old.start - *cursor.start(); @@ -491,14 +491,14 @@ impl FoldMap { while folds .peek() - .map_or(false, |(_, fold_range)| fold_range.start < edit.new.end) + .is_some_and(|(_, fold_range)| fold_range.start < edit.new.end) { let (fold, mut fold_range) = folds.next().unwrap(); let sum = new_transforms.summary(); assert!(fold_range.start.0 >= sum.input.len); - while folds.peek().map_or(false, |(next_fold, next_fold_range)| { + while folds.peek().is_some_and(|(next_fold, next_fold_range)| { next_fold_range.start < fold_range.end || (next_fold_range.start == fold_range.end && fold.placeholder.merge_adjacent @@ -575,14 +575,14 @@ impl FoldMap { for mut edit in inlay_edits { old_transforms.seek(&edit.old.start, Bias::Left); - if old_transforms.item().map_or(false, |t| t.is_fold()) { + if old_transforms.item().is_some_and(|t| t.is_fold()) { edit.old.start = old_transforms.start().0; } let old_start = 
old_transforms.start().1.0 + (edit.old.start - old_transforms.start().0).0; old_transforms.seek_forward(&edit.old.end, Bias::Right); - if old_transforms.item().map_or(false, |t| t.is_fold()) { + if old_transforms.item().is_some_and(|t| t.is_fold()) { old_transforms.next(); edit.old.end = old_transforms.start().0; } @@ -590,14 +590,14 @@ impl FoldMap { old_transforms.start().1.0 + (edit.old.end - old_transforms.start().0).0; new_transforms.seek(&edit.new.start, Bias::Left); - if new_transforms.item().map_or(false, |t| t.is_fold()) { + if new_transforms.item().is_some_and(|t| t.is_fold()) { edit.new.start = new_transforms.start().0; } let new_start = new_transforms.start().1.0 + (edit.new.start - new_transforms.start().0).0; new_transforms.seek_forward(&edit.new.end, Bias::Right); - if new_transforms.item().map_or(false, |t| t.is_fold()) { + if new_transforms.item().is_some_and(|t| t.is_fold()) { new_transforms.next(); edit.new.end = new_transforms.start().0; } @@ -709,7 +709,7 @@ impl FoldSnapshot { .transforms .cursor::>(&()); cursor.seek(&point, Bias::Right); - if cursor.item().map_or(false, |t| t.is_fold()) { + if cursor.item().is_some_and(|t| t.is_fold()) { if bias == Bias::Left || point == cursor.start().0 { cursor.start().1 } else { @@ -788,7 +788,7 @@ impl FoldSnapshot { let inlay_offset = self.inlay_snapshot.to_inlay_offset(buffer_offset); let mut cursor = self.transforms.cursor::(&()); cursor.seek(&inlay_offset, Bias::Right); - cursor.item().map_or(false, |t| t.placeholder.is_some()) + cursor.item().is_some_and(|t| t.placeholder.is_some()) } pub fn is_line_folded(&self, buffer_row: MultiBufferRow) -> bool { @@ -839,7 +839,7 @@ impl FoldSnapshot { let inlay_end = if transform_cursor .item() - .map_or(true, |transform| transform.is_fold()) + .is_none_or(|transform| transform.is_fold()) { inlay_start } else if range.end < transform_end.0 { @@ -1348,7 +1348,7 @@ impl FoldChunks<'_> { let inlay_end = if self .transform_cursor .item() - .map_or(true, |transform| transform.is_fold()) + .is_none_or(|transform| transform.is_fold()) { inlay_start } else if range.end < transform_end.0 { @@ -1463,7 +1463,7 @@ impl FoldOffset { .transforms .cursor::>(&()); cursor.seek(&self, Bias::Right); - let overshoot = if cursor.item().map_or(true, |t| t.is_fold()) { + let overshoot = if cursor.item().is_none_or(|t| t.is_fold()) { Point::new(0, (self.0 - cursor.start().0.0) as u32) } else { let inlay_offset = cursor.start().1.input.len + self.0 - cursor.start().0.0; @@ -1557,7 +1557,7 @@ mod tests { let buffer = MultiBuffer::build_simple(&sample_text(5, 6, 'a'), cx); let subscription = buffer.update(cx, |buffer, _| buffer.subscribe()); let buffer_snapshot = buffer.read(cx).snapshot(cx); - let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot); let mut map = FoldMap::new(inlay_snapshot.clone()).0; let (mut writer, _, _) = map.write(inlay_snapshot, vec![]); @@ -1636,7 +1636,7 @@ mod tests { let buffer = MultiBuffer::build_simple("abcdefghijkl", cx); let subscription = buffer.update(cx, |buffer, _| buffer.subscribe()); let buffer_snapshot = buffer.read(cx).snapshot(cx); - let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot); { let mut map = FoldMap::new(inlay_snapshot.clone()).0; @@ -1712,7 +1712,7 @@ mod tests { let buffer = MultiBuffer::build_simple(&sample_text(5, 6, 'a'), cx); let subscription = buffer.update(cx, 
|buffer, _| buffer.subscribe()); let buffer_snapshot = buffer.read(cx).snapshot(cx); - let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot); let mut map = FoldMap::new(inlay_snapshot.clone()).0; let (mut writer, _, _) = map.write(inlay_snapshot.clone(), vec![]); @@ -1720,7 +1720,7 @@ mod tests { (Point::new(0, 2)..Point::new(2, 2), FoldPlaceholder::test()), (Point::new(3, 1)..Point::new(4, 1), FoldPlaceholder::test()), ]); - let (snapshot, _) = map.read(inlay_snapshot.clone(), vec![]); + let (snapshot, _) = map.read(inlay_snapshot, vec![]); assert_eq!(snapshot.text(), "aa⋯cccc\nd⋯eeeee"); let buffer_snapshot = buffer.update(cx, |buffer, cx| { @@ -1747,7 +1747,7 @@ mod tests { (Point::new(1, 2)..Point::new(3, 2), FoldPlaceholder::test()), (Point::new(3, 1)..Point::new(4, 1), FoldPlaceholder::test()), ]); - let (snapshot, _) = map.read(inlay_snapshot.clone(), vec![]); + let (snapshot, _) = map.read(inlay_snapshot, vec![]); let fold_ranges = snapshot .folds_in_range(Point::new(1, 0)..Point::new(1, 3)) .map(|fold| { @@ -1782,7 +1782,7 @@ mod tests { let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); let mut map = FoldMap::new(inlay_snapshot.clone()).0; - let (mut initial_snapshot, _) = map.read(inlay_snapshot.clone(), vec![]); + let (mut initial_snapshot, _) = map.read(inlay_snapshot, vec![]); let mut snapshot_edits = Vec::new(); let mut next_inlay_id = 0; diff --git a/crates/editor/src/display_map/inlay_map.rs b/crates/editor/src/display_map/inlay_map.rs index b296b3e62a39aa2ec8671676e051e94f5f9622cf..3db9d10fdc74f418ecd4ea682dde91185130cd46 100644 --- a/crates/editor/src/display_map/inlay_map.rs +++ b/crates/editor/src/display_map/inlay_map.rs @@ -48,7 +48,7 @@ pub struct Inlay { impl Inlay { pub fn hint(id: usize, position: Anchor, hint: &project::InlayHint) -> Self { let mut text = hint.text(); - if hint.padding_right && text.chars_at(text.len().saturating_sub(1)).next() != Some(' ') { + if hint.padding_right && text.reversed_chars_at(text.len()).next() != Some(' ') { text.push(" "); } if hint.padding_left && text.chars_at(0).next() != Some(' ') { @@ -557,11 +557,11 @@ impl InlayMap { let mut buffer_edits_iter = buffer_edits.iter().peekable(); while let Some(buffer_edit) = buffer_edits_iter.next() { new_transforms.append(cursor.slice(&buffer_edit.old.start, Bias::Left), &()); - if let Some(Transform::Isomorphic(transform)) = cursor.item() { - if cursor.end().0 == buffer_edit.old.start { - push_isomorphic(&mut new_transforms, *transform); - cursor.next(); - } + if let Some(Transform::Isomorphic(transform)) = cursor.item() + && cursor.end().0 == buffer_edit.old.start + { + push_isomorphic(&mut new_transforms, *transform); + cursor.next(); } // Remove all the inlays and transforms contained by the edit. @@ -625,7 +625,7 @@ impl InlayMap { // we can push its remainder. 
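The hunk directly below, like dozens of others in this diff, swaps Option::map_or(false, ...) for is_some_and and map_or(true, ...) for is_none_or. A standalone sketch of the equivalence (illustrative only, not taken from the codebase):

    // map_or(false, f) behaves like is_some_and(f);
    // map_or(true, f) behaves like is_none_or(f).
    fn demo(height: Option<u32>) {
        assert_eq!(
            height.map_or(false, |h| h > 0),
            height.is_some_and(|h| h > 0)
        );
        assert_eq!(
            height.map_or(true, |h| h == 0),
            height.is_none_or(|h| h == 0)
        );
    }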
if buffer_edits_iter .peek() - .map_or(true, |edit| edit.old.start >= cursor.end().0) + .is_none_or(|edit| edit.old.start >= cursor.end().0) { let transform_start = new_transforms.summary().input.len; let transform_end = @@ -1305,6 +1305,29 @@ mod tests { ); } + #[gpui::test] + fn test_inlay_hint_padding_with_multibyte_chars() { + assert_eq!( + Inlay::hint( + 0, + Anchor::min(), + &InlayHint { + label: InlayHintLabel::String("🎨".to_string()), + position: text::Anchor::default(), + padding_left: true, + padding_right: true, + tooltip: None, + kind: None, + resolve_state: ResolveState::Resolved, + }, + ) + .text + .to_string(), + " 🎨 ", + "Should pad single emoji correctly" + ); + } + #[gpui::test] fn test_basic_inlays(cx: &mut App) { let buffer = MultiBuffer::build_simple("abcdefghi", cx); diff --git a/crates/editor/src/display_map/invisibles.rs b/crates/editor/src/display_map/invisibles.rs index 199986f2a41c82894acc0259be578a28e785a235..0712ddf9e2e53c22081c6fa63ebb4baeced37f78 100644 --- a/crates/editor/src/display_map/invisibles.rs +++ b/crates/editor/src/display_map/invisibles.rs @@ -36,8 +36,8 @@ pub fn is_invisible(c: char) -> bool { } else if c >= '\u{7f}' { c <= '\u{9f}' || (c.is_whitespace() && c != IDEOGRAPHIC_SPACE) - || contains(c, &FORMAT) - || contains(c, &OTHER) + || contains(c, FORMAT) + || contains(c, OTHER) } else { false } @@ -50,7 +50,7 @@ pub fn replacement(c: char) -> Option<&'static str> { Some(C0_SYMBOLS[c as usize]) } else if c == '\x7f' { Some(DEL) - } else if contains(c, &PRESERVE) { + } else if contains(c, PRESERVE) { None } else { Some("\u{2007}") // fixed width space @@ -61,14 +61,14 @@ pub fn replacement(c: char) -> Option<&'static str> { // but could if we tracked state in the classifier. const IDEOGRAPHIC_SPACE: char = '\u{3000}'; -const C0_SYMBOLS: &'static [&'static str] = &[ +const C0_SYMBOLS: &[&str] = &[ "␀", "␁", "␂", "␃", "␄", "␅", "␆", "␇", "␈", "␉", "␊", "␋", "␌", "␍", "␎", "␏", "␐", "␑", "␒", "␓", "␔", "␕", "␖", "␗", "␘", "␙", "␚", "␛", "␜", "␝", "␞", "␟", ]; -const DEL: &'static str = "␡"; +const DEL: &str = "␡"; // generated using ucd-generate: ucd-generate general-category --include Format --chars ucd-16.0.0 -pub const FORMAT: &'static [(char, char)] = &[ +pub const FORMAT: &[(char, char)] = &[ ('\u{ad}', '\u{ad}'), ('\u{600}', '\u{605}'), ('\u{61c}', '\u{61c}'), @@ -93,7 +93,7 @@ pub const FORMAT: &'static [(char, char)] = &[ ]; // hand-made base on https://invisible-characters.com (Excluding Cf) -pub const OTHER: &'static [(char, char)] = &[ +pub const OTHER: &[(char, char)] = &[ ('\u{034f}', '\u{034f}'), ('\u{115F}', '\u{1160}'), ('\u{17b4}', '\u{17b5}'), @@ -107,7 +107,7 @@ pub const OTHER: &'static [(char, char)] = &[ ]; // a subset of FORMAT/OTHER that may appear within glyphs -const PRESERVE: &'static [(char, char)] = &[ +const PRESERVE: &[(char, char)] = &[ ('\u{034f}', '\u{034f}'), ('\u{200d}', '\u{200d}'), ('\u{17b4}', '\u{17b5}'), diff --git a/crates/editor/src/display_map/tab_map.rs b/crates/editor/src/display_map/tab_map.rs index eb5d57d48472bdd4b2d4f150c40da05c7e422e19..6f5df9bb8e658b95260dde4feb2b00c177c98520 100644 --- a/crates/editor/src/display_map/tab_map.rs +++ b/crates/editor/src/display_map/tab_map.rs @@ -116,7 +116,7 @@ impl TabMap { state.new.end = edit.new.end; Some(None) // Skip this edit, it's merged } else { - let new_state = edit.clone(); + let new_state = edit; let result = Some(Some(state.clone())); // Yield the previous edit **state = new_state; result @@ -611,7 +611,7 @@ mod tests { fn test_expand_tabs(cx: &mut 
gpui::App) { let buffer = MultiBuffer::build_simple("", cx); let buffer_snapshot = buffer.read(cx).snapshot(cx); - let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot); let (_, fold_snapshot) = FoldMap::new(inlay_snapshot); let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap()); @@ -628,7 +628,7 @@ mod tests { let buffer = MultiBuffer::build_simple(input, cx); let buffer_snapshot = buffer.read(cx).snapshot(cx); - let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot); let (_, fold_snapshot) = FoldMap::new(inlay_snapshot); let (_, mut tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap()); @@ -675,7 +675,7 @@ mod tests { let buffer = MultiBuffer::build_simple(input, cx); let buffer_snapshot = buffer.read(cx).snapshot(cx); - let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot); let (_, fold_snapshot) = FoldMap::new(inlay_snapshot); let (_, mut tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap()); @@ -689,7 +689,7 @@ mod tests { let buffer = MultiBuffer::build_simple(input, cx); let buffer_snapshot = buffer.read(cx).snapshot(cx); - let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot); let (_, fold_snapshot) = FoldMap::new(inlay_snapshot); let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap()); @@ -749,7 +749,7 @@ mod tests { let buffer_snapshot = buffer.read(cx).snapshot(cx); log::info!("Buffer text: {:?}", buffer_snapshot.text()); - let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot); log::info!("InlayMap text: {:?}", inlay_snapshot.text()); let (mut fold_map, _) = FoldMap::new(inlay_snapshot.clone()); fold_map.randomly_mutate(&mut rng); @@ -758,7 +758,7 @@ mod tests { let (inlay_snapshot, _) = inlay_map.randomly_mutate(&mut 0, &mut rng); log::info!("InlayMap text: {:?}", inlay_snapshot.text()); - let (mut tab_map, _) = TabMap::new(fold_snapshot.clone(), tab_size); + let (mut tab_map, _) = TabMap::new(fold_snapshot, tab_size); let tabs_snapshot = tab_map.set_max_expansion_column(32); let text = text::Rope::from(tabs_snapshot.text().as_str()); diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 269f8f0c409cee7fa87b64cccb5310e4beb1edd2..500ec3a0bb77f8a8332e86485b81b357644e6d23 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -74,10 +74,10 @@ impl WrapRows<'_> { self.transforms .seek(&WrapPoint::new(start_row, 0), Bias::Left); let mut input_row = self.transforms.start().1.row(); - if self.transforms.item().map_or(false, |t| t.is_isomorphic()) { + if self.transforms.item().is_some_and(|t| t.is_isomorphic()) { input_row += start_row - self.transforms.start().0.row(); } - self.soft_wrapped = self.transforms.item().map_or(false, |t| !t.is_isomorphic()); + self.soft_wrapped = self.transforms.item().is_some_and(|t| !t.is_isomorphic()); self.input_buffer_rows.seek(input_row); self.input_buffer_row = self.input_buffer_rows.next().unwrap(); self.output_row = start_row; @@ -249,48 +249,48 @@ impl WrapMap { return; } - if let Some(wrap_width) = self.wrap_width { - if self.background_task.is_none() { - let pending_edits = self.pending_edits.clone(); - let mut 
snapshot = self.snapshot.clone(); - let text_system = cx.text_system().clone(); - let (font, font_size) = self.font_with_size.clone(); - let update_task = cx.background_spawn(async move { - let mut edits = Patch::default(); - let mut line_wrapper = text_system.line_wrapper(font, font_size); - for (tab_snapshot, tab_edits) in pending_edits { - let wrap_edits = snapshot - .update(tab_snapshot, &tab_edits, wrap_width, &mut line_wrapper) - .await; - edits = edits.compose(&wrap_edits); - } - (snapshot, edits) - }); + if let Some(wrap_width) = self.wrap_width + && self.background_task.is_none() + { + let pending_edits = self.pending_edits.clone(); + let mut snapshot = self.snapshot.clone(); + let text_system = cx.text_system().clone(); + let (font, font_size) = self.font_with_size.clone(); + let update_task = cx.background_spawn(async move { + let mut edits = Patch::default(); + let mut line_wrapper = text_system.line_wrapper(font, font_size); + for (tab_snapshot, tab_edits) in pending_edits { + let wrap_edits = snapshot + .update(tab_snapshot, &tab_edits, wrap_width, &mut line_wrapper) + .await; + edits = edits.compose(&wrap_edits); + } + (snapshot, edits) + }); - match cx - .background_executor() - .block_with_timeout(Duration::from_millis(1), update_task) - { - Ok((snapshot, output_edits)) => { - self.snapshot = snapshot; - self.edits_since_sync = self.edits_since_sync.compose(&output_edits); - } - Err(update_task) => { - self.background_task = Some(cx.spawn(async move |this, cx| { - let (snapshot, edits) = update_task.await; - this.update(cx, |this, cx| { - this.snapshot = snapshot; - this.edits_since_sync = this - .edits_since_sync - .compose(mem::take(&mut this.interpolated_edits).invert()) - .compose(&edits); - this.background_task = None; - this.flush_edits(cx); - cx.notify(); - }) - .ok(); - })); - } + match cx + .background_executor() + .block_with_timeout(Duration::from_millis(1), update_task) + { + Ok((snapshot, output_edits)) => { + self.snapshot = snapshot; + self.edits_since_sync = self.edits_since_sync.compose(&output_edits); + } + Err(update_task) => { + self.background_task = Some(cx.spawn(async move |this, cx| { + let (snapshot, edits) = update_task.await; + this.update(cx, |this, cx| { + this.snapshot = snapshot; + this.edits_since_sync = this + .edits_since_sync + .compose(mem::take(&mut this.interpolated_edits).invert()) + .compose(&edits); + this.background_task = None; + this.flush_edits(cx); + cx.notify(); + }) + .ok(); + })); } } } @@ -603,7 +603,7 @@ impl WrapSnapshot { .cursor::>(&()); transforms.seek(&output_start, Bias::Right); let mut input_start = TabPoint(transforms.start().1.0); - if transforms.item().map_or(false, |t| t.is_isomorphic()) { + if transforms.item().is_some_and(|t| t.is_isomorphic()) { input_start.0 += output_start.0 - transforms.start().0.0; } let input_end = self @@ -634,7 +634,7 @@ impl WrapSnapshot { cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Left); if cursor .item() - .map_or(false, |transform| transform.is_isomorphic()) + .is_some_and(|transform| transform.is_isomorphic()) { let overshoot = row - cursor.start().0.row(); let tab_row = cursor.start().1.row() + overshoot; @@ -732,10 +732,10 @@ impl WrapSnapshot { .cursor::>(&()); transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left); let mut input_row = transforms.start().1.row(); - if transforms.item().map_or(false, |t| t.is_isomorphic()) { + if transforms.item().is_some_and(|t| t.is_isomorphic()) { input_row += start_row - transforms.start().0.row(); } - let soft_wrapped = 
transforms.item().map_or(false, |t| !t.is_isomorphic()); + let soft_wrapped = transforms.item().is_some_and(|t| !t.is_isomorphic()); let mut input_buffer_rows = self.tab_snapshot.rows(input_row); let input_buffer_row = input_buffer_rows.next().unwrap(); WrapRows { @@ -754,7 +754,7 @@ impl WrapSnapshot { .cursor::>(&()); cursor.seek(&point, Bias::Right); let mut tab_point = cursor.start().1.0; - if cursor.item().map_or(false, |t| t.is_isomorphic()) { + if cursor.item().is_some_and(|t| t.is_isomorphic()) { tab_point += point.0 - cursor.start().0.0; } TabPoint(tab_point) @@ -780,7 +780,7 @@ impl WrapSnapshot { if bias == Bias::Left { let mut cursor = self.transforms.cursor::(&()); cursor.seek(&point, Bias::Right); - if cursor.item().map_or(false, |t| !t.is_isomorphic()) { + if cursor.item().is_some_and(|t| !t.is_isomorphic()) { point = *cursor.start(); *point.column_mut() -= 1; } @@ -901,7 +901,7 @@ impl WrapChunks<'_> { let output_end = WrapPoint::new(rows.end, 0); self.transforms.seek(&output_start, Bias::Right); let mut input_start = TabPoint(self.transforms.start().1.0); - if self.transforms.item().map_or(false, |t| t.is_isomorphic()) { + if self.transforms.item().is_some_and(|t| t.is_isomorphic()) { input_start.0 += output_start.0 - self.transforms.start().0.0; } let input_end = self @@ -993,7 +993,7 @@ impl Iterator for WrapRows<'_> { self.output_row += 1; self.transforms .seek_forward(&WrapPoint::new(self.output_row, 0), Bias::Left); - if self.transforms.item().map_or(false, |t| t.is_isomorphic()) { + if self.transforms.item().is_some_and(|t| t.is_isomorphic()) { self.input_buffer_row = self.input_buffer_rows.next().unwrap(); self.soft_wrapped = false; } else { @@ -1065,12 +1065,12 @@ impl sum_tree::Item for Transform { } fn push_isomorphic(transforms: &mut Vec, summary: TextSummary) { - if let Some(last_transform) = transforms.last_mut() { - if last_transform.is_isomorphic() { - last_transform.summary.input += &summary; - last_transform.summary.output += &summary; - return; - } + if let Some(last_transform) = transforms.last_mut() + && last_transform.is_isomorphic() + { + last_transform.summary.input += &summary; + last_transform.summary.output += &summary; + return; } transforms.push(Transform::isomorphic(summary)); } @@ -1223,7 +1223,7 @@ mod tests { let tab_size = NonZeroU32::new(rng.gen_range(1..=4)).unwrap(); let font = test_font(); - let _font_id = text_system.font_id(&font); + let _font_id = text_system.resolve_font(&font); let font_size = px(14.0); log::info!("Tab size: {}", tab_size); @@ -1461,7 +1461,7 @@ mod tests { } let mut prev_ix = 0; - for boundary in line_wrapper.wrap_line(&[LineFragment::text(&line)], wrap_width) { + for boundary in line_wrapper.wrap_line(&[LineFragment::text(line)], wrap_width) { wrapped_text.push_str(&line[prev_ix..boundary.ix]); wrapped_text.push('\n'); wrapped_text.push_str(&" ".repeat(boundary.next_indent as usize)); diff --git a/crates/editor/src/edit_prediction_tests.rs b/crates/editor/src/edit_prediction_tests.rs index 7bf51e45d72f383b4af34cf6ad493792f8e9d351..bba632e81f77ba91927abd1c0e3448a732e1c6f5 100644 --- a/crates/editor/src/edit_prediction_tests.rs +++ b/crates/editor/src/edit_prediction_tests.rs @@ -7,7 +7,9 @@ use std::ops::Range; use text::{Point, ToOffset}; use crate::{ - EditPrediction, editor_tests::init_test, test::editor_test_context::EditorTestContext, + EditPrediction, + editor_tests::{init_test, update_test_language_settings}, + test::editor_test_context::EditorTestContext, }; #[gpui::test] @@ -271,6 +273,44 @@ async fn 
test_edit_prediction_jump_disabled_for_non_zed_providers(cx: &mut gpui: }); } +#[gpui::test] +async fn test_edit_predictions_disabled_in_scope(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + update_test_language_settings(cx, |settings| { + settings.defaults.edit_predictions_disabled_in = Some(vec!["string".to_string()]); + }); + + let mut cx = EditorTestContext::new(cx).await; + let provider = cx.new(|_| FakeEditPredictionProvider::default()); + assign_editor_completion_provider(provider.clone(), &mut cx); + + let language = languages::language("javascript", tree_sitter_typescript::LANGUAGE_TSX.into()); + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + + // Test disabled inside of string + cx.set_state("const x = \"hello ˇworld\";"); + propose_edits(&provider, vec![(17..17, "beautiful ")], &mut cx); + cx.update_editor(|editor, window, cx| editor.update_visible_edit_prediction(window, cx)); + cx.editor(|editor, _, _| { + assert!( + editor.active_edit_prediction.is_none(), + "Edit predictions should be disabled in string scopes when configured in edit_predictions_disabled_in" + ); + }); + + // Test enabled outside of string + cx.set_state("const x = \"hello world\"; ˇ"); + propose_edits(&provider, vec![(24..24, "// comment")], &mut cx); + cx.update_editor(|editor, window, cx| editor.update_visible_edit_prediction(window, cx)); + cx.editor(|editor, _, _| { + assert!( + editor.active_edit_prediction.is_some(), + "Edit predictions should work outside of disabled scopes" + ); + }); +} + fn assert_editor_active_edit_completion( cx: &mut EditorTestContext, assert: impl FnOnce(MultiBufferSnapshot, &Vec<(Range, String)>), diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index d1bf95c794a2c388334275b94a1201f6a733c632..2af8e6c0e4d235bd136d1f24a66645af2a6258cd 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -250,6 +250,24 @@ pub type RenderDiffHunkControlsFn = Arc< ) -> AnyElement, >; +enum ReportEditorEvent { + Saved { auto_saved: bool }, + EditorOpened, + ZetaTosClicked, + Closed, +} + +impl ReportEditorEvent { + pub fn event_type(&self) -> &'static str { + match self { + Self::Saved { .. 
} => "Editor Saved", + Self::EditorOpened => "Editor Opened", + Self::ZetaTosClicked => "Edit Prediction Provider ToS Clicked", + Self::Closed => "Editor Closed", + } + } +} + struct InlineValueCache { enabled: bool, inlays: Vec, @@ -764,10 +782,7 @@ impl MinimapVisibility { } fn disabled(&self) -> bool { - match *self { - Self::Disabled => true, - _ => false, - } + matches!(*self, Self::Disabled) } fn settings_visibility(&self) -> bool { @@ -924,10 +939,10 @@ impl ChangeList { } pub fn invert_last_group(&mut self) { - if let Some(last) = self.changes.last_mut() { - if let Some(current) = last.current.as_mut() { - mem::swap(&mut last.original, current); - } + if let Some(last) = self.changes.last_mut() + && let Some(current) = last.current.as_mut() + { + mem::swap(&mut last.original, current); } } } @@ -1021,9 +1036,7 @@ pub struct Editor { inline_diagnostics: Vec<(Anchor, InlineDiagnostic)>, soft_wrap_mode_override: Option, hard_wrap: Option, - - // TODO: make this a access method - pub project: Option>, + project: Option>, semantics_provider: Option>, completion_provider: Option>, collaboration_hub: Option>, @@ -1413,7 +1426,7 @@ impl SelectionHistory { if self .undo_stack .back() - .map_or(true, |e| e.selections != entry.selections) + .is_none_or(|e| e.selections != entry.selections) { self.undo_stack.push_back(entry); if self.undo_stack.len() > MAX_SELECTION_HISTORY_LEN { @@ -1426,7 +1439,7 @@ impl SelectionHistory { if self .redo_stack .back() - .map_or(true, |e| e.selections != entry.selections) + .is_none_or(|e| e.selections != entry.selections) { self.redo_stack.push_back(entry); if self.redo_stack.len() > MAX_SELECTION_HISTORY_LEN { @@ -1841,118 +1854,166 @@ impl Editor { blink_manager }); - let soft_wrap_mode_override = matches!(mode, EditorMode::SingleLine { .. }) - .then(|| language_settings::SoftWrap::None); + let soft_wrap_mode_override = + matches!(mode, EditorMode::SingleLine).then(|| language_settings::SoftWrap::None); let mut project_subscriptions = Vec::new(); - if full_mode { - if let Some(project) = project.as_ref() { - project_subscriptions.push(cx.subscribe_in( - project, - window, - |editor, _, event, window, cx| match event { - project::Event::RefreshCodeLens => { - // we always query lens with actions, without storing them, always refreshing them - } - project::Event::RefreshInlayHints => { - editor - .refresh_inlay_hints(InlayHintRefreshReason::RefreshRequested, cx); - } - project::Event::LanguageServerAdded(..) - | project::Event::LanguageServerRemoved(..) => { - if editor.tasks_update_task.is_none() { - editor.tasks_update_task = - Some(editor.refresh_runnables(window, cx)); - } + if full_mode && let Some(project) = project.as_ref() { + project_subscriptions.push(cx.subscribe_in( + project, + window, + |editor, _, event, window, cx| match event { + project::Event::RefreshCodeLens => { + // we always query lens with actions, without storing them, always refreshing them + } + project::Event::RefreshInlayHints => { + editor.refresh_inlay_hints(InlayHintRefreshReason::RefreshRequested, cx); + } + project::Event::LanguageServerAdded(..) + | project::Event::LanguageServerRemoved(..) 
=> { + if editor.tasks_update_task.is_none() { + editor.tasks_update_task = Some(editor.refresh_runnables(window, cx)); } - project::Event::SnippetEdit(id, snippet_edits) => { - if let Some(buffer) = editor.buffer.read(cx).buffer(*id) { - let focus_handle = editor.focus_handle(cx); - if focus_handle.is_focused(window) { - let snapshot = buffer.read(cx).snapshot(); - for (range, snippet) in snippet_edits { - let editor_range = - language::range_from_lsp(*range).to_offset(&snapshot); - editor - .insert_snippet( - &[editor_range], - snippet.clone(), - window, - cx, - ) - .ok(); - } + } + project::Event::SnippetEdit(id, snippet_edits) => { + if let Some(buffer) = editor.buffer.read(cx).buffer(*id) { + let focus_handle = editor.focus_handle(cx); + if focus_handle.is_focused(window) { + let snapshot = buffer.read(cx).snapshot(); + for (range, snippet) in snippet_edits { + let editor_range = + language::range_from_lsp(*range).to_offset(&snapshot); + editor + .insert_snippet( + &[editor_range], + snippet.clone(), + window, + cx, + ) + .ok(); } } } - project::Event::LanguageServerBufferRegistered { buffer_id, .. } => { - if editor.buffer().read(cx).buffer(*buffer_id).is_some() { - editor.update_lsp_data(false, Some(*buffer_id), window, cx); - } + } + project::Event::LanguageServerBufferRegistered { buffer_id, .. } => { + if editor.buffer().read(cx).buffer(*buffer_id).is_some() { + editor.update_lsp_data(false, Some(*buffer_id), window, cx); } - _ => {} - }, - )); - if let Some(task_inventory) = project - .read(cx) - .task_store() - .read(cx) - .task_inventory() - .cloned() - { - project_subscriptions.push(cx.observe_in( - &task_inventory, - window, - |editor, _, window, cx| { - editor.tasks_update_task = Some(editor.refresh_runnables(window, cx)); - }, - )); - }; + } - project_subscriptions.push(cx.subscribe_in( - &project.read(cx).breakpoint_store(), - window, - |editor, _, event, window, cx| match event { - BreakpointStoreEvent::ClearDebugLines => { - editor.clear_row_highlights::(); - editor.refresh_inline_values(cx); - } - BreakpointStoreEvent::SetDebugLine => { - if editor.go_to_active_debug_line(window, cx) { - cx.stop_propagation(); - } + project::Event::EntryRenamed(transaction) => { + let Some(workspace) = editor.workspace() else { + return; + }; + let Some(active_editor) = workspace.read(cx).active_item_as::(cx) + else { + return; + }; + if active_editor.entity_id() == cx.entity_id() { + let edited_buffers_already_open = { + let other_editors: Vec> = workspace + .read(cx) + .panes() + .iter() + .flat_map(|pane| pane.read(cx).items_of_type::()) + .filter(|editor| editor.entity_id() != cx.entity_id()) + .collect(); + + transaction.0.keys().all(|buffer| { + other_editors.iter().any(|editor| { + let multi_buffer = editor.read(cx).buffer(); + multi_buffer.read(cx).is_singleton() + && multi_buffer.read(cx).as_singleton().map_or( + false, + |singleton| { + singleton.entity_id() == buffer.entity_id() + }, + ) + }) + }) + }; - editor.refresh_inline_values(cx); + if !edited_buffers_already_open { + let workspace = workspace.downgrade(); + let transaction = transaction.clone(); + cx.defer_in(window, move |_, window, cx| { + cx.spawn_in(window, async move |editor, cx| { + Self::open_project_transaction( + &editor, + workspace, + transaction, + "Rename".to_string(), + cx, + ) + .await + .ok() + }) + .detach(); + }); + } } - _ => {} + } + + _ => {} + }, + )); + if let Some(task_inventory) = project + .read(cx) + .task_store() + .read(cx) + .task_inventory() + .cloned() + { + 
project_subscriptions.push(cx.observe_in( + &task_inventory, + window, + |editor, _, window, cx| { + editor.tasks_update_task = Some(editor.refresh_runnables(window, cx)); }, )); - let git_store = project.read(cx).git_store().clone(); - let project = project.clone(); - project_subscriptions.push(cx.subscribe(&git_store, move |this, _, event, cx| { - match event { - GitStoreEvent::RepositoryUpdated( - _, - RepositoryEvent::Updated { - new_instance: true, .. - }, - _, - ) => { - this.load_diff_task = Some( - update_uncommitted_diff_for_buffer( - cx.entity(), - &project, - this.buffer.read(cx).all_buffers(), - this.buffer.clone(), - cx, - ) - .shared(), - ); + }; + + project_subscriptions.push(cx.subscribe_in( + &project.read(cx).breakpoint_store(), + window, + |editor, _, event, window, cx| match event { + BreakpointStoreEvent::ClearDebugLines => { + editor.clear_row_highlights::(); + editor.refresh_inline_values(cx); + } + BreakpointStoreEvent::SetDebugLine => { + if editor.go_to_active_debug_line(window, cx) { + cx.stop_propagation(); } - _ => {} + + editor.refresh_inline_values(cx); } - })); - } + _ => {} + }, + )); + let git_store = project.read(cx).git_store().clone(); + let project = project.clone(); + project_subscriptions.push(cx.subscribe(&git_store, move |this, _, event, cx| { + if let GitStoreEvent::RepositoryUpdated( + _, + RepositoryEvent::Updated { + new_instance: true, .. + }, + _, + ) = event + { + this.load_diff_task = Some( + update_uncommitted_diff_for_buffer( + cx.entity(), + &project, + this.buffer.read(cx).all_buffers(), + this.buffer.clone(), + cx, + ) + .shared(), + ); + } + })); } let buffer_snapshot = buffer.read(cx).snapshot(cx); @@ -1973,14 +2034,12 @@ impl Editor { .detach(); } - let show_indent_guides = if matches!( - mode, - EditorMode::SingleLine { .. } | EditorMode::Minimap { .. } - ) { - Some(false) - } else { - None - }; + let show_indent_guides = + if matches!(mode, EditorMode::SingleLine | EditorMode::Minimap { .. }) { + Some(false) + } else { + None + }; let breakpoint_store = match (&mode, project.as_ref()) { (EditorMode::Full { .. }, Some(project)) => Some(project.read(cx).breakpoint_store()), @@ -2040,7 +2099,7 @@ impl Editor { vertical: full_mode, }, minimap_visibility: MinimapVisibility::for_mode(&mode, cx), - offset_content: !matches!(mode, EditorMode::SingleLine { .. 
}), + offset_content: !matches!(mode, EditorMode::SingleLine), show_breadcrumbs: EditorSettings::get_global(cx).toolbar.breadcrumbs, show_gutter: full_mode, show_line_numbers: (!full_mode).then_some(false), @@ -2307,15 +2366,15 @@ impl Editor { editor.go_to_active_debug_line(window, cx); - if let Some(buffer) = buffer.read(cx).as_singleton() { - if let Some(project) = editor.project.as_ref() { - let handle = project.update(cx, |project, cx| { - project.register_buffer_with_language_servers(&buffer, cx) - }); - editor - .registered_buffers - .insert(buffer.read(cx).remote_id(), handle); - } + if let Some(buffer) = buffer.read(cx).as_singleton() + && let Some(project) = editor.project() + { + let handle = project.update(cx, |project, cx| { + project.register_buffer_with_language_servers(&buffer, cx) + }); + editor + .registered_buffers + .insert(buffer.read(cx).remote_id(), handle); } editor.minimap = @@ -2325,7 +2384,7 @@ impl Editor { } if editor.mode.is_full() { - editor.report_editor_event("Editor Opened", None, cx); + editor.report_editor_event(ReportEditorEvent::EditorOpened, None, cx); } editor @@ -2353,6 +2412,34 @@ impl Editor { .is_some_and(|menu| menu.context_menu.focus_handle(cx).is_focused(window)) } + pub fn is_range_selected(&mut self, range: &Range, cx: &mut Context) -> bool { + if self + .selections + .pending + .as_ref() + .is_some_and(|pending_selection| { + let snapshot = self.buffer().read(cx).snapshot(cx); + pending_selection + .selection + .range() + .includes(range, &snapshot) + }) + { + return true; + } + + self.selections + .disjoint_in_range::(range.clone(), cx) + .into_iter() + .any(|selection| { + // This is needed to cover a corner case, if we just check for an existing + // selection in the fold range, having a cursor at the start of the fold + // marks it as selected. Non-empty selections don't cause this. + let length = selection.end - selection.start; + length > 0 + }) + } + pub fn key_context(&self, window: &Window, cx: &App) -> KeyContext { self.key_context_internal(self.has_active_edit_prediction(), window, cx) } @@ -2366,7 +2453,7 @@ impl Editor { let mut key_context = KeyContext::new_with_defaults(); key_context.add("Editor"); let mode = match self.mode { - EditorMode::SingleLine { .. } => "single_line", + EditorMode::SingleLine => "single_line", EditorMode::AutoHeight { .. } => "auto_height", EditorMode::Minimap { .. } => "minimap", EditorMode::Full { .. 
} => "full", @@ -2472,9 +2559,7 @@ impl Editor { .context_menu .borrow() .as_ref() - .map_or(false, |context| { - matches!(context, CodeContextMenu::Completions(_)) - }); + .is_some_and(|context| matches!(context, CodeContextMenu::Completions(_))); showing_completions || self.edit_prediction_requires_modifier() @@ -2505,7 +2590,7 @@ impl Editor { || binding .keystrokes() .first() - .map_or(false, |keystroke| keystroke.modifiers.modified()) + .is_some_and(|keystroke| keystroke.modifiers.modified()) })) } @@ -2608,6 +2693,10 @@ impl Editor { &self.buffer } + pub fn project(&self) -> Option<&Entity> { + self.project.as_ref() + } + pub fn workspace(&self) -> Option> { self.workspace.as_ref()?.0.upgrade() } @@ -2897,7 +2986,7 @@ impl Editor { return false; }; - scope.override_name().map_or(false, |scope_name| { + scope.override_name().is_some_and(|scope_name| { settings .edit_predictions_disabled_in .iter() @@ -2987,20 +3076,19 @@ impl Editor { } if local { - if let Some(buffer_id) = new_cursor_position.buffer_id { - if !self.registered_buffers.contains_key(&buffer_id) { - if let Some(project) = self.project.as_ref() { - project.update(cx, |project, cx| { - let Some(buffer) = self.buffer.read(cx).buffer(buffer_id) else { - return; - }; - self.registered_buffers.insert( - buffer_id, - project.register_buffer_with_language_servers(&buffer, cx), - ); - }) - } - } + if let Some(buffer_id) = new_cursor_position.buffer_id + && !self.registered_buffers.contains_key(&buffer_id) + && let Some(project) = self.project.as_ref() + { + project.update(cx, |project, cx| { + let Some(buffer) = self.buffer.read(cx).buffer(buffer_id) else { + return; + }; + self.registered_buffers.insert( + buffer_id, + project.register_buffer_with_language_servers(&buffer, cx), + ); + }) } let mut context_menu = self.context_menu.borrow_mut(); @@ -3015,28 +3103,28 @@ impl Editor { let completion_position = completion_menu.map(|menu| menu.initial_position); drop(context_menu); - if effects.completions { - if let Some(completion_position) = completion_position { - let start_offset = selection_start.to_offset(buffer); - let position_matches = start_offset == completion_position.to_offset(buffer); - let continue_showing = if position_matches { - if self.snippet_stack.is_empty() { - buffer.char_kind_before(start_offset, true) == Some(CharKind::Word) - } else { - // Snippet choices can be shown even when the cursor is in whitespace. - // Dismissing the menu with actions like backspace is handled by - // invalidation regions. - true - } - } else { - false - }; - - if continue_showing { - self.show_completions(&ShowCompletions { trigger: None }, window, cx); + if effects.completions + && let Some(completion_position) = completion_position + { + let start_offset = selection_start.to_offset(buffer); + let position_matches = start_offset == completion_position.to_offset(buffer); + let continue_showing = if position_matches { + if self.snippet_stack.is_empty() { + buffer.char_kind_before(start_offset, true) == Some(CharKind::Word) } else { - self.hide_context_menu(window, cx); + // Snippet choices can be shown even when the cursor is in whitespace. + // Dismissing the menu with actions like backspace is handled by + // invalidation regions. 
+ true } + } else { + false + }; + + if continue_showing { + self.show_completions(&ShowCompletions { trigger: None }, window, cx); + } else { + self.hide_context_menu(window, cx); } } @@ -3067,30 +3155,27 @@ impl Editor { if selections.len() == 1 { cx.emit(SearchEvent::ActiveMatchChanged) } - if local { - if let Some((_, _, buffer_snapshot)) = buffer.as_singleton() { - let inmemory_selections = selections - .iter() - .map(|s| { - text::ToPoint::to_point(&s.range().start.text_anchor, buffer_snapshot) - ..text::ToPoint::to_point(&s.range().end.text_anchor, buffer_snapshot) - }) - .collect(); - self.update_restoration_data(cx, |data| { - data.selections = inmemory_selections; - }); + if local && let Some((_, _, buffer_snapshot)) = buffer.as_singleton() { + let inmemory_selections = selections + .iter() + .map(|s| { + text::ToPoint::to_point(&s.range().start.text_anchor, buffer_snapshot) + ..text::ToPoint::to_point(&s.range().end.text_anchor, buffer_snapshot) + }) + .collect(); + self.update_restoration_data(cx, |data| { + data.selections = inmemory_selections; + }); - if WorkspaceSettings::get(None, cx).restore_on_startup - != RestoreOnStartupBehavior::None - { - if let Some(workspace_id) = - self.workspace.as_ref().and_then(|workspace| workspace.1) - { - let snapshot = self.buffer().read(cx).snapshot(cx); - let selections = selections.clone(); - let background_executor = cx.background_executor().clone(); - let editor_id = cx.entity().entity_id().as_u64() as ItemId; - self.serialize_selections = cx.background_spawn(async move { + if WorkspaceSettings::get(None, cx).restore_on_startup != RestoreOnStartupBehavior::None + && let Some(workspace_id) = + self.workspace.as_ref().and_then(|workspace| workspace.1) + { + let snapshot = self.buffer().read(cx).snapshot(cx); + let selections = selections.clone(); + let background_executor = cx.background_executor().clone(); + let editor_id = cx.entity().entity_id().as_u64() as ItemId; + self.serialize_selections = cx.background_spawn(async move { background_executor.timer(SERIALIZATION_THROTTLE_TIME).await; let db_selections = selections .iter() @@ -3107,8 +3192,6 @@ impl Editor { .with_context(|| format!("persisting editor selections for editor {editor_id}, workspace {workspace_id:?}")) .log_err(); }); - } - } } } @@ -3185,35 +3268,31 @@ impl Editor { selections.select_anchors(other_selections); }); - let other_subscription = - cx.subscribe(&other, |this, other, other_evt, cx| match other_evt { - EditorEvent::SelectionsChanged { local: true } => { - let other_selections = other.read(cx).selections.disjoint.to_vec(); - if other_selections.is_empty() { - return; - } - this.selections.change_with(cx, |selections| { - selections.select_anchors(other_selections); - }); + let other_subscription = cx.subscribe(&other, |this, other, other_evt, cx| { + if let EditorEvent::SelectionsChanged { local: true } = other_evt { + let other_selections = other.read(cx).selections.disjoint.to_vec(); + if other_selections.is_empty() { + return; } - _ => {} - }); + this.selections.change_with(cx, |selections| { + selections.select_anchors(other_selections); + }); + } + }); - let this_subscription = - cx.subscribe_self::(move |this, this_evt, cx| match this_evt { - EditorEvent::SelectionsChanged { local: true } => { - let these_selections = this.selections.disjoint.to_vec(); - if these_selections.is_empty() { - return; - } - other.update(cx, |other_editor, cx| { - other_editor.selections.change_with(cx, |selections| { - selections.select_anchors(these_selections); - }) - }); + 
let this_subscription = cx.subscribe_self::(move |this, this_evt, cx| { + if let EditorEvent::SelectionsChanged { local: true } = this_evt { + let these_selections = this.selections.disjoint.to_vec(); + if these_selections.is_empty() { + return; } - _ => {} - }); + other.update(cx, |other_editor, cx| { + other_editor.selections.change_with(cx, |selections| { + selections.select_anchors(these_selections); + }) + }); + } + }); Subscription::join(other_subscription, this_subscription) } @@ -3294,9 +3373,9 @@ impl Editor { let old_cursor_position = &state.old_cursor_position; - self.selections_did_change(true, &old_cursor_position, state.effects, window, cx); + self.selections_did_change(true, old_cursor_position, state.effects, window, cx); - if self.should_open_signature_help_automatically(&old_cursor_position, cx) { + if self.should_open_signature_help_automatically(old_cursor_position, cx) { self.show_signature_help(&ShowSignatureHelp, window, cx); } } @@ -3716,9 +3795,9 @@ impl Editor { ColumnarSelectionState::FromMouse { selection_tail, display_point, - } => display_point.unwrap_or_else(|| selection_tail.to_display_point(&display_map)), + } => display_point.unwrap_or_else(|| selection_tail.to_display_point(display_map)), ColumnarSelectionState::FromSelection { selection_tail } => { - selection_tail.to_display_point(&display_map) + selection_tail.to_display_point(display_map) } }; @@ -3995,18 +4074,18 @@ impl Editor { let following_text_allows_autoclose = snapshot .chars_at(selection.start) .next() - .map_or(true, |c| scope.should_autoclose_before(c)); + .is_none_or(|c| scope.should_autoclose_before(c)); let preceding_text_allows_autoclose = selection.start.column == 0 - || snapshot.reversed_chars_at(selection.start).next().map_or( - true, - |c| { + || snapshot + .reversed_chars_at(selection.start) + .next() + .is_none_or(|c| { bracket_pair.start != bracket_pair.end || !snapshot .char_classifier_at(selection.start) .is_word(c) - }, - ); + }); let is_closing_quote = if bracket_pair.end == bracket_pair.start && bracket_pair.start.len() == 1 @@ -4106,42 +4185,38 @@ impl Editor { if self.auto_replace_emoji_shortcode && selection.is_empty() && text.as_ref().ends_with(':') - { - if let Some(possible_emoji_short_code) = + && let Some(possible_emoji_short_code) = Self::find_possible_emoji_shortcode_at_position(&snapshot, selection.start) - { - if !possible_emoji_short_code.is_empty() { - if let Some(emoji) = emojis::get_by_shortcode(&possible_emoji_short_code) { - let emoji_shortcode_start = Point::new( - selection.start.row, - selection.start.column - possible_emoji_short_code.len() as u32 - 1, - ); + && !possible_emoji_short_code.is_empty() + && let Some(emoji) = emojis::get_by_shortcode(&possible_emoji_short_code) + { + let emoji_shortcode_start = Point::new( + selection.start.row, + selection.start.column - possible_emoji_short_code.len() as u32 - 1, + ); - // Remove shortcode from buffer - edits.push(( - emoji_shortcode_start..selection.start, - "".to_string().into(), - )); - new_selections.push(( - Selection { - id: selection.id, - start: snapshot.anchor_after(emoji_shortcode_start), - end: snapshot.anchor_before(selection.start), - reversed: selection.reversed, - goal: selection.goal, - }, - 0, - )); + // Remove shortcode from buffer + edits.push(( + emoji_shortcode_start..selection.start, + "".to_string().into(), + )); + new_selections.push(( + Selection { + id: selection.id, + start: snapshot.anchor_after(emoji_shortcode_start), + end: snapshot.anchor_before(selection.start), + reversed: 
selection.reversed, + goal: selection.goal, + }, + 0, + )); - // Insert emoji - let selection_start_anchor = snapshot.anchor_after(selection.start); - new_selections.push((selection.map(|_| selection_start_anchor), 0)); - edits.push((selection.start..selection.end, emoji.to_string().into())); + // Insert emoji + let selection_start_anchor = snapshot.anchor_after(selection.start); + new_selections.push((selection.map(|_| selection_start_anchor), 0)); + edits.push((selection.start..selection.end, emoji.to_string().into())); - continue; - } - } - } + continue; } // If not handling any auto-close operation, then just replace the selected @@ -4151,7 +4226,7 @@ impl Editor { if !self.linked_edit_ranges.is_empty() { let start_anchor = snapshot.anchor_before(selection.start); - let is_word_char = text.chars().next().map_or(true, |char| { + let is_word_char = text.chars().next().is_none_or(|char| { let classifier = snapshot .char_classifier_at(start_anchor.to_offset(&snapshot)) .ignore_punctuation(true); @@ -4255,12 +4330,11 @@ impl Editor { |s| s.select(new_selections), ); - if !bracket_inserted { - if let Some(on_type_format_task) = + if !bracket_inserted + && let Some(on_type_format_task) = this.trigger_on_type_formatting(text.to_string(), window, cx) - { - on_type_format_task.detach_and_log_err(cx); - } + { + on_type_format_task.detach_and_log_err(cx); } let editor_settings = EditorSettings::get_global(cx); @@ -4506,7 +4580,7 @@ impl Editor { let mut char_position = 0u32; let mut end_tag_offset = None; - 'outer: for chunk in snapshot.text_for_range(range.clone()) { + 'outer: for chunk in snapshot.text_for_range(range) { if let Some(byte_pos) = chunk.find(&**end_tag) { let chars_before_match = chunk[..byte_pos].chars().count() as u32; @@ -4856,11 +4930,7 @@ impl Editor { cx: &mut Context, ) -> bool { let position = self.selections.newest_anchor().head(); - let multibuffer = self.buffer.read(cx); - let Some(buffer) = position - .buffer_id - .and_then(|buffer_id| multibuffer.buffer(buffer_id).clone()) - else { + let Some(buffer) = self.buffer.read(cx).buffer_for_anchor(position, cx) else { return false; }; @@ -5194,7 +5264,7 @@ impl Editor { restrict_to_languages: Option<&HashSet>>, cx: &mut Context, ) -> HashMap, clock::Global, Range)> { - let Some(project) = self.project.as_ref() else { + let Some(project) = self.project() else { return HashMap::default(); }; let project = project.read(cx); @@ -5226,10 +5296,10 @@ impl Editor { } let language = buffer.language()?; - if let Some(restrict_to_languages) = restrict_to_languages { - if !restrict_to_languages.contains(language) { - return None; - } + if let Some(restrict_to_languages) = restrict_to_languages + && !restrict_to_languages.contains(language) + { + return None; } Some(( excerpt_id, @@ -5276,7 +5346,7 @@ impl Editor { return None; } - let project = self.project.as_ref()?; + let project = self.project()?; let position = self.selections.newest_anchor().head(); let (buffer, buffer_position) = self .buffer @@ -5394,11 +5464,11 @@ impl Editor { let sort_completions = provider .as_ref() - .map_or(false, |provider| provider.sort_completions()); + .is_some_and(|provider| provider.sort_completions()); let filter_completions = provider .as_ref() - .map_or(true, |provider| provider.filter_completions()); + .is_none_or(|provider| provider.filter_completions()); let trigger_kind = match trigger { Some(trigger) if buffer.read(cx).completion_triggers().contains(trigger) => { @@ -5504,7 +5574,7 @@ impl Editor { let skip_digits = query .as_ref() - 
.map_or(true, |query| !query.chars().any(|c| c.is_digit(10))); + .is_none_or(|query| !query.chars().any(|c| c.is_digit(10))); let (mut words, provider_responses) = match &provider { Some(provider) => { @@ -5557,15 +5627,15 @@ impl Editor { // that having one source with `is_incomplete: true` doesn't cause all to be re-queried. let mut completions = Vec::new(); let mut is_incomplete = false; - if let Some(provider_responses) = provider_responses.await.log_err() { - if !provider_responses.is_empty() { - for response in provider_responses { - completions.extend(response.completions); - is_incomplete = is_incomplete || response.is_incomplete; - } - if completion_settings.words == WordsCompletionMode::Fallback { - words = Task::ready(BTreeMap::default()); - } + if let Some(provider_responses) = provider_responses.await.log_err() + && !provider_responses.is_empty() + { + for response in provider_responses { + completions.extend(response.completions); + is_incomplete = is_incomplete || response.is_incomplete; + } + if completion_settings.words == WordsCompletionMode::Fallback { + words = Task::ready(BTreeMap::default()); } } @@ -5630,34 +5700,31 @@ impl Editor { let Ok(()) = editor.update_in(cx, |editor, window, cx| { // Newer menu already set, so exit. - match editor.context_menu.borrow().as_ref() { - Some(CodeContextMenu::Completions(prev_menu)) => { - if prev_menu.id > id { - return; - } - } - _ => {} + if let Some(CodeContextMenu::Completions(prev_menu)) = + editor.context_menu.borrow().as_ref() + && prev_menu.id > id + { + return; }; // Only valid to take prev_menu because it the new menu is immediately set // below, or the menu is hidden. - match editor.context_menu.borrow_mut().take() { - Some(CodeContextMenu::Completions(prev_menu)) => { - let position_matches = - if prev_menu.initial_position == menu.initial_position { - true - } else { - let snapshot = editor.buffer.read(cx).read(cx); - prev_menu.initial_position.to_offset(&snapshot) - == menu.initial_position.to_offset(&snapshot) - }; - if position_matches { - // Preserve markdown cache before `set_filter_results` because it will - // try to populate the documentation cache. - menu.preserve_markdown_cache(prev_menu); - } + if let Some(CodeContextMenu::Completions(prev_menu)) = + editor.context_menu.borrow_mut().take() + { + let position_matches = + if prev_menu.initial_position == menu.initial_position { + true + } else { + let snapshot = editor.buffer.read(cx).read(cx); + prev_menu.initial_position.to_offset(&snapshot) + == menu.initial_position.to_offset(&snapshot) + }; + if position_matches { + // Preserve markdown cache before `set_filter_results` because it will + // try to populate the documentation cache. 
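
A minimal standalone sketch, not part of the patch, of the Option::map_or rewrites applied throughout these hunks: map_or(false, f) becomes is_some_and(f) and map_or(true, f) becomes is_none_or(f). The helper names below are simplified stand-ins, and is_none_or assumes a Rust 1.82+ toolchain.

fn skip_digits(query: Option<&str>) -> bool {
    // Old form: query.map_or(true, |q| !q.chars().any(|c| c.is_ascii_digit()))
    // New form states the same condition directly: true when there is no query,
    // or when the query contains no digits.
    query.is_none_or(|q| !q.chars().any(|c| c.is_ascii_digit()))
}

fn sorts_completions(provider: Option<&str>) -> bool {
    // Old form: provider.map_or(false, |p| p == "lsp")
    provider.is_some_and(|p| p == "lsp")
}

fn main() {
    assert!(skip_digits(None));
    assert!(skip_digits(Some("alpha")));
    assert!(!skip_digits(Some("row42")));
    assert!(sorts_completions(Some("lsp")));
    assert!(!sorts_completions(None));
}
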
+ menu.preserve_markdown_cache(prev_menu); } - _ => {} }; menu.set_filter_results(matches, provider, window, cx); @@ -5670,21 +5737,21 @@ impl Editor { editor .update_in(cx, |editor, window, cx| { - if editor.focus_handle.is_focused(window) { - if let Some(menu) = menu { - *editor.context_menu.borrow_mut() = - Some(CodeContextMenu::Completions(menu)); - - crate::hover_popover::hide_hover(editor, cx); - if editor.show_edit_predictions_in_menu() { - editor.update_visible_edit_prediction(window, cx); - } else { - editor.discard_edit_prediction(false, cx); - } + if editor.focus_handle.is_focused(window) + && let Some(menu) = menu + { + *editor.context_menu.borrow_mut() = + Some(CodeContextMenu::Completions(menu)); - cx.notify(); - return; + crate::hover_popover::hide_hover(editor, cx); + if editor.show_edit_predictions_in_menu() { + editor.update_visible_edit_prediction(window, cx); + } else { + editor.discard_edit_prediction(false, cx); } + + cx.notify(); + return; } if editor.completion_tasks.len() <= 1 { @@ -5827,7 +5894,7 @@ impl Editor { multibuffer_anchor.start.to_offset(&snapshot) ..multibuffer_anchor.end.to_offset(&snapshot) }; - if newest_anchor.head().buffer_id != Some(buffer.remote_id()) { + if snapshot.buffer_id_for_anchor(newest_anchor.head()) != Some(buffer.remote_id()) { return None; } @@ -5938,7 +6005,7 @@ impl Editor { let show_new_completions_on_confirm = completion .confirm .as_ref() - .map_or(false, |confirm| confirm(intent, window, cx)); + .is_some_and(|confirm| confirm(intent, window, cx)); if show_new_completions_on_confirm { self.show_completions(&ShowCompletions { trigger: None }, window, cx); } @@ -6031,11 +6098,11 @@ impl Editor { Some(CodeActionSource::Indicator(_)) => Task::ready(Ok(Default::default())), _ => { let mut task_context_task = Task::ready(None); - if let Some(tasks) = &tasks { - if let Some(project) = project { - task_context_task = - Self::build_tasks_context(&project, &buffer, buffer_row, &tasks, cx); - } + if let Some(tasks) = &tasks + && let Some(project) = project + { + task_context_task = + Self::build_tasks_context(&project, &buffer, buffer_row, tasks, cx); } cx.spawn_in(window, { @@ -6070,10 +6137,10 @@ impl Editor { let spawn_straight_away = quick_launch && resolved_tasks .as_ref() - .map_or(false, |tasks| tasks.templates.len() == 1) + .is_some_and(|tasks| tasks.templates.len() == 1) && code_actions .as_ref() - .map_or(true, |actions| actions.is_empty()) + .is_none_or(|actions| actions.is_empty()) && debug_scenarios.is_empty(); editor.update_in(cx, |editor, window, cx| { @@ -6100,14 +6167,14 @@ impl Editor { deployed_from, })); cx.notify(); - if spawn_straight_away { - if let Some(task) = editor.confirm_code_action( + if spawn_straight_away + && let Some(task) = editor.confirm_code_action( &ConfirmCodeAction { item_ix: Some(0) }, window, cx, - ) { - return task; - } + ) + { + return task; } Task::ready(Ok(())) @@ -6123,7 +6190,7 @@ impl Editor { cx: &mut App, ) -> Task> { maybe!({ - let project = self.project.as_ref()?; + let project = self.project()?; let dap_store = project.read(cx).dap_store(); let mut scenarios = vec![]; let resolved_tasks = resolved_tasks.as_ref()?; @@ -6148,12 +6215,11 @@ impl Editor { } }); Some(cx.background_spawn(async move { - let scenarios = futures::future::join_all(scenarios) + futures::future::join_all(scenarios) .await .into_iter() .flatten() - .collect::>(); - scenarios + .collect::>() })) }) .unwrap_or_else(|| Task::ready(vec![])) @@ -6251,7 +6317,7 @@ impl Editor { })) } 
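
A small sketch of the join_all(..).await.into_iter().flatten().collect() shape the debug-scenario collection above was reduced to. It assumes the futures crate for block_on and join_all, and scenario_for is a hypothetical stand-in for the real scenario lookup.

use futures::executor::block_on;
use futures::future::join_all;

// Hypothetical resolver: only tasks that look debuggable yield a scenario.
async fn scenario_for(task: &str) -> Option<String> {
    task.starts_with("debug").then(|| format!("scenario for {task}"))
}

fn main() {
    let tasks = ["debug: run", "build", "debug: test"];
    let scenarios: Vec<String> = block_on(async {
        join_all(tasks.iter().copied().map(scenario_for))
            .await
            .into_iter()
            .flatten() // drop the Nones, keep the resolved scenarios
            .collect()
    });
    assert_eq!(scenarios.len(), 2);
}
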
CodeActionsItem::DebugScenario(scenario) => { - let context = actions_menu.actions.context.clone(); + let context = actions_menu.actions.context; workspace.update(cx, |workspace, cx| { dap::send_telemetry(&scenario, TelemetrySpawnLocation::Gutter, cx); @@ -6270,7 +6336,7 @@ impl Editor { } pub async fn open_project_transaction( - this: &WeakEntity, + editor: &WeakEntity, workspace: WeakEntity, transaction: ProjectTransaction, title: String, @@ -6288,27 +6354,26 @@ impl Editor { if let Some((buffer, transaction)) = entries.first() { if entries.len() == 1 { - let excerpt = this.update(cx, |editor, cx| { + let excerpt = editor.update(cx, |editor, cx| { editor .buffer() .read(cx) .excerpt_containing(editor.selections.newest_anchor().head(), cx) })?; - if let Some((_, excerpted_buffer, excerpt_range)) = excerpt { - if excerpted_buffer == *buffer { - let all_edits_within_excerpt = buffer.read_with(cx, |buffer, _| { - let excerpt_range = excerpt_range.to_offset(buffer); - buffer - .edited_ranges_for_transaction::(transaction) - .all(|range| { - excerpt_range.start <= range.start - && excerpt_range.end >= range.end - }) - })?; + if let Some((_, excerpted_buffer, excerpt_range)) = excerpt + && excerpted_buffer == *buffer + { + let all_edits_within_excerpt = buffer.read_with(cx, |buffer, _| { + let excerpt_range = excerpt_range.to_offset(buffer); + buffer + .edited_ranges_for_transaction::(transaction) + .all(|range| { + excerpt_range.start <= range.start && excerpt_range.end >= range.end + }) + })?; - if all_edits_within_excerpt { - return Ok(()); - } + if all_edits_within_excerpt { + return Ok(()); } } } @@ -6452,7 +6517,7 @@ impl Editor { fn refresh_code_actions(&mut self, window: &mut Window, cx: &mut Context) -> Option<()> { let newest_selection = self.selections.newest_anchor().clone(); - let newest_selection_adjusted = self.selections.newest_adjusted(cx).clone(); + let newest_selection_adjusted = self.selections.newest_adjusted(cx); let buffer = self.buffer.read(cx); if newest_selection.head().diff_base_anchor.is_some() { return None; @@ -6686,11 +6751,10 @@ impl Editor { return; } - let buffer_id = cursor_position.buffer_id; let buffer = this.buffer.read(cx); - if !buffer + if buffer .text_anchor_for_position(cursor_position, cx) - .map_or(false, |(buffer, _)| buffer == cursor_buffer) + .is_none_or(|(buffer, _)| buffer != cursor_buffer) { return; } @@ -6699,8 +6763,8 @@ impl Editor { let mut write_ranges = Vec::new(); let mut read_ranges = Vec::new(); for highlight in highlights { - for (excerpt_id, excerpt_range) in - buffer.excerpts_for_buffer(cursor_buffer.read(cx).remote_id(), cx) + let buffer_id = cursor_buffer.read(cx).remote_id(); + for (excerpt_id, excerpt_range) in buffer.excerpts_for_buffer(buffer_id, cx) { let start = highlight .range @@ -6715,12 +6779,12 @@ impl Editor { } let range = Anchor { - buffer_id, + buffer_id: Some(buffer_id), excerpt_id, text_anchor: start, diff_base_anchor: None, }..Anchor { - buffer_id, + buffer_id: Some(buffer_id), excerpt_id, text_anchor: end, diff_base_anchor: None, @@ -6755,7 +6819,7 @@ impl Editor { &mut self, cx: &mut Context, ) -> Option<(String, Range)> { - if matches!(self.mode, EditorMode::SingleLine { .. 
}) { + if matches!(self.mode, EditorMode::SingleLine) { return None; } if !EditorSettings::get_global(cx).selection_highlight { @@ -6816,7 +6880,7 @@ impl Editor { for (buffer_snapshot, search_range, excerpt_id) in buffer_ranges { match_ranges.extend( regex - .search(&buffer_snapshot, Some(search_range.clone())) + .search(buffer_snapshot, Some(search_range.clone())) .await .into_iter() .filter_map(|match_range| { @@ -6940,9 +7004,7 @@ impl Editor { || self .quick_selection_highlight_task .as_ref() - .map_or(true, |(prev_anchor_range, _)| { - prev_anchor_range != &query_range - }) + .is_none_or(|(prev_anchor_range, _)| prev_anchor_range != &query_range) { let multi_buffer_visible_start = self .scroll_manager @@ -6971,9 +7033,7 @@ impl Editor { || self .debounced_selection_highlight_task .as_ref() - .map_or(true, |(prev_anchor_range, _)| { - prev_anchor_range != &query_range - }) + .is_none_or(|(prev_anchor_range, _)| prev_anchor_range != &query_range) { let multi_buffer_start = multi_buffer_snapshot .anchor_before(0) @@ -7108,9 +7168,7 @@ impl Editor { && self .edit_prediction_provider .as_ref() - .map_or(false, |provider| { - provider.provider.show_completions_in_menu() - }); + .is_some_and(|provider| provider.provider.show_completions_in_menu()); let preview_requires_modifier = all_language_settings(file, cx).edit_predictions_mode() == EditPredictionsMode::Subtle; @@ -7158,7 +7216,7 @@ impl Editor { return Some(false); } let provider = self.edit_prediction_provider()?; - if !provider.is_enabled(&buffer, buffer_position, cx) { + if !provider.is_enabled(buffer, buffer_position, cx) { return Some(false); } let buffer = buffer.read(cx); @@ -7694,7 +7752,7 @@ impl Editor { || self .active_edit_prediction .as_ref() - .map_or(false, |completion| { + .is_some_and(|completion| { let invalidation_range = completion.invalidation_range.to_offset(&multibuffer); let invalidation_range = invalidation_range.start..=invalidation_range.end; !invalidation_range.contains(&offset_selection.head()) @@ -7716,6 +7774,11 @@ impl Editor { self.edit_prediction_settings = self.edit_prediction_settings_at_position(&buffer, cursor_buffer_position, cx); + if let EditPredictionSettings::Disabled = self.edit_prediction_settings { + self.discard_edit_prediction(false, cx); + return None; + }; + self.edit_prediction_indent_conflict = multibuffer.is_line_whitespace_upto(cursor); if self.edit_prediction_indent_conflict { @@ -7723,10 +7786,10 @@ impl Editor { let indents = multibuffer.suggested_indents(cursor_point.row..cursor_point.row + 1, cx); - if let Some((_, indent)) = indents.iter().next() { - if indent.len == cursor_point.column { - self.edit_prediction_indent_conflict = false; - } + if let Some((_, indent)) = indents.iter().next() + && indent.len == cursor_point.column + { + self.edit_prediction_indent_conflict = false; } } @@ -7889,7 +7952,7 @@ impl Editor { let snapshot = self.snapshot(window, cx); let multi_buffer_snapshot = &snapshot.display_snapshot.buffer_snapshot; - let Some(project) = self.project.as_ref() else { + let Some(project) = self.project() else { return breakpoint_display_points; }; @@ -7918,7 +7981,7 @@ impl Editor { let multi_buffer_anchor = Anchor::in_buffer(excerpt_id, buffer_snapshot.remote_id(), breakpoint.position); let position = multi_buffer_anchor - .to_point(&multi_buffer_snapshot) + .to_point(multi_buffer_snapshot) .to_display_point(&snapshot); breakpoint_display_points.insert( @@ -8172,8 +8235,6 @@ impl Editor { .icon_color(color) .style(ButtonStyle::Transparent) .on_click(cx.listener({ 
- let breakpoint = breakpoint.clone(); - move |editor, event: &ClickEvent, window, cx| { let edit_action = if event.modifiers().platform || breakpoint.is_disabled() { BreakpointEditAction::InvertState @@ -8387,7 +8448,7 @@ impl Editor { .context_menu .borrow() .as_ref() - .map_or(false, |menu| menu.visible()) + .is_some_and(|menu| menu.visible()) } pub fn context_menu_origin(&self) -> Option { @@ -8811,7 +8872,7 @@ impl Editor { } let highlighted_edits = if let Some(edit_preview) = edit_preview.as_ref() { - crate::edit_prediction_edit_text(&snapshot, edits, edit_preview, false, cx) + crate::edit_prediction_edit_text(snapshot, edits, edit_preview, false, cx) } else { // Fallback for providers without edit_preview crate::edit_prediction_fallback_text(edits, cx) @@ -8933,9 +8994,8 @@ impl Editor { let end_row = start_row + line_count as u32; visible_row_range.contains(&start_row) && visible_row_range.contains(&end_row) - && cursor_row.map_or(true, |cursor_row| { - !((start_row..end_row).contains(&cursor_row)) - }) + && cursor_row + .is_none_or(|cursor_row| !((start_row..end_row).contains(&cursor_row))) })?; content_origin @@ -9124,7 +9184,7 @@ impl Editor { .on_mouse_down(MouseButton::Left, |_, window, _| window.prevent_default()) .on_click(cx.listener(|this, _event, window, cx| { cx.stop_propagation(); - this.report_editor_event("Edit Prediction Provider ToS Clicked", None, cx); + this.report_editor_event(ReportEditorEvent::ZetaTosClicked, None, cx); window.dispatch_action( zed_actions::OpenZedPredictOnboarding.boxed_clone(), cx, @@ -9174,7 +9234,7 @@ impl Editor { .child(div().px_1p5().child(match &prediction.completion { EditPrediction::Move { target, snapshot } => { use text::ToPoint as _; - if target.text_anchor.to_point(&snapshot).row > cursor_point.row + if target.text_anchor.to_point(snapshot).row > cursor_point.row { Icon::new(IconName::ZedPredictDown) } else { @@ -9376,7 +9436,7 @@ impl Editor { .gap_2() .flex_1() .child( - if target.text_anchor.to_point(&snapshot).row > cursor_point.row { + if target.text_anchor.to_point(snapshot).row > cursor_point.row { Icon::new(IconName::ZedPredictDown) } else { Icon::new(IconName::ZedPredictUp) @@ -9392,14 +9452,14 @@ impl Editor { snapshot, display_mode: _, } => { - let first_edit_row = edits.first()?.0.start.text_anchor.to_point(&snapshot).row; + let first_edit_row = edits.first()?.0.start.text_anchor.to_point(snapshot).row; let (highlighted_edits, has_more_lines) = if let Some(edit_preview) = edit_preview.as_ref() { - crate::edit_prediction_edit_text(&snapshot, &edits, edit_preview, true, cx) + crate::edit_prediction_edit_text(snapshot, edits, edit_preview, true, cx) .first_line_preview() } else { - crate::edit_prediction_fallback_text(&edits, cx).first_line_preview() + crate::edit_prediction_fallback_text(edits, cx).first_line_preview() }; let styled_text = gpui::StyledText::new(highlighted_edits.text) @@ -9475,10 +9535,10 @@ impl Editor { let context_menu = self.context_menu.borrow_mut().take(); self.stale_edit_prediction_in_menu.take(); self.update_visible_edit_prediction(window, cx); - if let Some(CodeContextMenu::Completions(_)) = &context_menu { - if let Some(completion_provider) = &self.completion_provider { - completion_provider.selection_changed(None, window, cx); - } + if let Some(CodeContextMenu::Completions(_)) = &context_menu + && let Some(completion_provider) = &self.completion_provider + { + completion_provider.selection_changed(None, window, cx); } context_menu } @@ -9488,18 +9548,22 @@ impl Editor { choices: &Vec, 
selection: Range, cx: &mut Context, - ) { - let buffer_id = match (&selection.start.buffer_id, &selection.end.buffer_id) { - (Some(a), Some(b)) if a == b => a, - _ => { - log::error!("expected anchor range to have matching buffer IDs"); - return; - } + ) { + let Some((_, buffer, _)) = self + .buffer() + .read(cx) + .excerpt_containing(selection.start, cx) + else { + return; }; - let multi_buffer = self.buffer().read(cx); - let Some(buffer) = multi_buffer.buffer(*buffer_id) else { + let Some((_, end_buffer, _)) = self.buffer().read(cx).excerpt_containing(selection.end, cx) + else { return; }; + if buffer != end_buffer { + log::error!("expected anchor range to have matching buffer IDs"); + return; + } let id = post_inc(&mut self.next_completion_id); let snippet_sort_order = EditorSettings::get_global(cx).snippet_sort_order; @@ -9545,7 +9609,7 @@ impl Editor { .tabstops .iter() .map(|tabstop| { - let is_end_tabstop = tabstop.ranges.first().map_or(false, |tabstop| { + let is_end_tabstop = tabstop.ranges.first().is_some_and(|tabstop| { tabstop.is_empty() && tabstop.start == snippet.text.len() as isize }); let mut tabstop_ranges = tabstop @@ -9583,10 +9647,10 @@ impl Editor { s.select_ranges(tabstop.ranges.iter().rev().cloned()); }); - if let Some(choices) = &tabstop.choices { - if let Some(selection) = tabstop.ranges.first() { - self.show_snippet_choices(choices, selection.clone(), cx) - } + if let Some(choices) = &tabstop.choices + && let Some(selection) = tabstop.ranges.first() + { + self.show_snippet_choices(choices, selection.clone(), cx) } // If we're already at the last tabstop and it's at the end of the snippet, @@ -9720,10 +9784,10 @@ impl Editor { s.select_ranges(current_ranges.iter().rev().cloned()) }); - if let Some(choices) = &snippet.choices[snippet.active_index] { - if let Some(selection) = current_ranges.first() { - self.show_snippet_choices(&choices, selection.clone(), cx); - } + if let Some(choices) = &snippet.choices[snippet.active_index] + && let Some(selection) = current_ranges.first() + { + self.show_snippet_choices(choices, selection.clone(), cx); } // If snippet state is not at the last tabstop, push it back on the stack @@ -10120,10 +10184,10 @@ impl Editor { // Avoid re-outdenting a row that has already been outdented by a // previous selection. 
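
A minimal sketch of the nested if-let collapse that most of these hunks apply, assuming a toolchain where let-chains are stable (Rust 2024 edition):

// Before: two levels of nesting.
fn first_even_nested(values: &[Option<i32>]) -> Option<i32> {
    for value in values {
        if let Some(v) = value {
            if v % 2 == 0 {
                return Some(*v);
            }
        }
    }
    None
}

// After: one let-chain, same behaviour, one less indentation level.
fn first_even_chained(values: &[Option<i32>]) -> Option<i32> {
    for value in values {
        if let Some(v) = value
            && v % 2 == 0
        {
            return Some(*v);
        }
    }
    None
}

fn main() {
    let values = [None, Some(3), Some(8), Some(10)];
    assert_eq!(first_even_nested(&values), first_even_chained(&values));
}
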
- if let Some(last_row) = last_outdent { - if last_row == rows.start { - rows.start = rows.start.next_row(); - } + if let Some(last_row) = last_outdent + && last_row == rows.start + { + rows.start = rows.start.next_row(); } let has_multiple_rows = rows.len() > 1; for row in rows.iter_rows() { @@ -10301,11 +10365,11 @@ impl Editor { MultiBufferRow(selection.end.row) }; - if let Some(last_row_range) = row_ranges.last_mut() { - if start <= last_row_range.end { - last_row_range.end = end; - continue; - } + if let Some(last_row_range) = row_ranges.last_mut() + && start <= last_row_range.end + { + last_row_range.end = end; + continue; } row_ranges.push(start..end); } @@ -10483,7 +10547,7 @@ impl Editor { ) { if let Some(working_directory) = self.active_excerpt(cx).and_then(|(_, buffer, _)| { let project_path = buffer.read(cx).project_path(cx)?; - let project = self.project.as_ref()?.read(cx); + let project = self.project()?.read(cx); let entry = project.entry_for_path(&project_path, cx)?; let parent = match &entry.canonical_path { Some(canonical_path) => canonical_path.to_path_buf(), @@ -10586,16 +10650,12 @@ impl Editor { snapshot: &EditorSnapshot, cx: &mut Context, ) -> Option<(Anchor, Breakpoint)> { - let project = self.project.clone()?; - - let buffer_id = breakpoint_position.buffer_id.or_else(|| { - snapshot - .buffer_snapshot - .buffer_id_for_excerpt(breakpoint_position.excerpt_id) - })?; + let buffer = self + .buffer + .read(cx) + .buffer_for_anchor(breakpoint_position, cx)?; let enclosing_excerpt = breakpoint_position.excerpt_id; - let buffer = project.read(cx).buffer_for_id(buffer_id, cx)?; let buffer_snapshot = buffer.read(cx).snapshot(); let row = buffer_snapshot @@ -10607,8 +10667,7 @@ impl Editor { .buffer_snapshot .anchor_after(Point::new(row, line_len)); - let bp = self - .breakpoint_store + self.breakpoint_store .as_ref()? .read_with(cx, |breakpoint_store, cx| { breakpoint_store @@ -10633,8 +10692,7 @@ impl Editor { None } }) - }); - bp + }) } pub fn edit_log_breakpoint( @@ -10670,7 +10728,7 @@ impl Editor { let cursors = self .selections .disjoint_anchors() - .into_iter() + .iter() .map(|selection| { let cursor_position: Point = selection.head().to_point(&snapshot.buffer_snapshot); @@ -10770,21 +10828,11 @@ impl Editor { return; }; - let Some(buffer_id) = breakpoint_position.buffer_id.or_else(|| { - if breakpoint_position == Anchor::min() { - self.buffer() - .read(cx) - .excerpt_buffer_ids() - .into_iter() - .next() - } else { - None - } - }) else { - return; - }; - - let Some(buffer) = self.buffer().read(cx).buffer(buffer_id) else { + let Some(buffer) = self + .buffer + .read(cx) + .buffer_for_anchor(breakpoint_position, cx) + else { return; }; @@ -11012,7 +11060,7 @@ impl Editor { let mut col = 0; let mut changed = false; - while let Some(ch) = chars.next() { + for ch in chars.by_ref() { match ch { ' ' => { reindented_line.push(' '); @@ -11068,7 +11116,7 @@ impl Editor { let mut first_non_indent_char = None; let mut changed = false; - while let Some(ch) = chars.next() { + for ch in chars.by_ref() { match ch { ' ' => { // Keep track of spaces. 
Append \t when we reach tab_size @@ -11676,7 +11724,7 @@ impl Editor { let transpose_start = display_map .buffer_snapshot .clip_offset(transpose_offset.saturating_sub(1), Bias::Left); - if edits.last().map_or(true, |e| e.0.end <= transpose_start) { + if edits.last().is_none_or(|e| e.0.end <= transpose_start) { let transpose_end = display_map .buffer_snapshot .clip_offset(transpose_offset + 1, Bias::Right); @@ -12158,6 +12206,8 @@ impl Editor { let clipboard_text = Cow::Borrowed(text); self.transact(window, cx, |this, window, cx| { + let had_active_edit_prediction = this.has_active_edit_prediction(); + if let Some(mut clipboard_selections) = clipboard_selections { let old_selections = this.selections.all::(cx); let all_selections_were_entire_line = @@ -12230,6 +12280,11 @@ impl Editor { } else { this.insert(&clipboard_text, window, cx); } + + let trigger_in_words = + this.show_edit_predictions_in_menu() || !had_active_edit_prediction; + + this.trigger_completion_on_input(text, trigger_in_words, window, cx); }); } @@ -12583,7 +12638,7 @@ impl Editor { return; } - if matches!(self.mode, EditorMode::SingleLine { .. }) { + if matches!(self.mode, EditorMode::SingleLine) { cx.propagate(); return; } @@ -12707,7 +12762,7 @@ impl Editor { return; } - if matches!(self.mode, EditorMode::SingleLine { .. }) { + if matches!(self.mode, EditorMode::SingleLine) { cx.propagate(); return; } @@ -13191,7 +13246,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - if matches!(self.mode, EditorMode::SingleLine { .. }) { + if matches!(self.mode, EditorMode::SingleLine) { cx.propagate(); return; } @@ -13212,7 +13267,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - if matches!(self.mode, EditorMode::SingleLine { .. }) { + if matches!(self.mode, EditorMode::SingleLine) { cx.propagate(); return; } @@ -13233,7 +13288,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - if matches!(self.mode, EditorMode::SingleLine { .. }) { + if matches!(self.mode, EditorMode::SingleLine) { cx.propagate(); return; } @@ -13254,7 +13309,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - if matches!(self.mode, EditorMode::SingleLine { .. }) { + if matches!(self.mode, EditorMode::SingleLine) { cx.propagate(); return; } @@ -13275,7 +13330,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - if matches!(self.mode, EditorMode::SingleLine { .. }) { + if matches!(self.mode, EditorMode::SingleLine) { cx.propagate(); return; } @@ -13300,7 +13355,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - if matches!(self.mode, EditorMode::SingleLine { .. }) { + if matches!(self.mode, EditorMode::SingleLine) { cx.propagate(); return; } @@ -13325,7 +13380,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - if matches!(self.mode, EditorMode::SingleLine { .. }) { + if matches!(self.mode, EditorMode::SingleLine) { cx.propagate(); return; } @@ -13350,7 +13405,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - if matches!(self.mode, EditorMode::SingleLine { .. }) { + if matches!(self.mode, EditorMode::SingleLine) { cx.propagate(); return; } @@ -13375,7 +13430,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - if matches!(self.mode, EditorMode::SingleLine { .. }) { + if matches!(self.mode, EditorMode::SingleLine) { cx.propagate(); return; } @@ -13396,7 +13451,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - if matches!(self.mode, EditorMode::SingleLine { .. 
}) { + if matches!(self.mode, EditorMode::SingleLine) { cx.propagate(); return; } @@ -13417,7 +13472,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - if matches!(self.mode, EditorMode::SingleLine { .. }) { + if matches!(self.mode, EditorMode::SingleLine) { cx.propagate(); return; } @@ -13438,7 +13493,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - if matches!(self.mode, EditorMode::SingleLine { .. }) { + if matches!(self.mode, EditorMode::SingleLine) { cx.propagate(); return; } @@ -13459,7 +13514,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - if matches!(self.mode, EditorMode::SingleLine { .. }) { + if matches!(self.mode, EditorMode::SingleLine) { cx.propagate(); return; } @@ -13484,7 +13539,7 @@ impl Editor { } pub fn move_to_end(&mut self, _: &MoveToEnd, window: &mut Window, cx: &mut Context) { - if matches!(self.mode, EditorMode::SingleLine { .. }) { + if matches!(self.mode, EditorMode::SingleLine) { cx.propagate(); return; } @@ -13587,7 +13642,7 @@ impl Editor { pub fn split_selection_into_lines( &mut self, - _: &SplitSelectionIntoLines, + action: &SplitSelectionIntoLines, window: &mut Window, cx: &mut Context, ) { @@ -13604,8 +13659,21 @@ impl Editor { let buffer = self.buffer.read(cx).read(cx); for selection in selections { for row in selection.start.row..selection.end.row { - let cursor = Point::new(row, buffer.line_len(MultiBufferRow(row))); - new_selection_ranges.push(cursor..cursor); + let line_start = Point::new(row, 0); + let line_end = Point::new(row, buffer.line_len(MultiBufferRow(row))); + + if action.keep_selections { + // Keep the selection range for each line + let selection_start = if row == selection.start.row { + selection.start + } else { + line_start + }; + new_selection_ranges.push(selection_start..line_end); + } else { + // Collapse to cursor at end of line + new_selection_ranges.push(line_end..line_end); + } } let is_multiline_selection = selection.start.row != selection.end.row; @@ -13613,7 +13681,16 @@ impl Editor { // so this action feels more ergonomic when paired with other selection operations let should_skip_last = is_multiline_selection && selection.end.column == 0; if !should_skip_last { - new_selection_ranges.push(selection.end..selection.end); + if action.keep_selections { + if is_multiline_selection { + let line_start = Point::new(selection.end.row, 0); + new_selection_ranges.push(line_start..selection.end); + } else { + new_selection_ranges.push(selection.start..selection.end); + } + } else { + new_selection_ranges.push(selection.end..selection.end); + } } } } @@ -14511,7 +14588,7 @@ impl Editor { let advance_downwards = action.advance_downwards && selections_on_single_row && !selections_selecting - && !matches!(this.mode, EditorMode::SingleLine { .. 
}); + && !matches!(this.mode, EditorMode::SingleLine); if advance_downwards { let snapshot = this.buffer.read(cx).snapshot(cx); @@ -14757,15 +14834,18 @@ impl Editor { self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let buffer = self.buffer.read(cx).snapshot(cx); - let old_selections: Box<[_]> = self.selections.all::(cx).into(); + let selections = self + .selections + .all::(cx) + .into_iter() + // subtracting the offset requires sorting + .sorted_by_key(|i| i.start); - let edits = old_selections - .iter() - // only consider the first selection for now - .take(1) - .map(|selection| { + let full_edits = selections + .into_iter() + .filter_map(|selection| { // Only requires two branches once if-let-chains stabilize (#53667) - let selection_range = if !selection.is_empty() { + let child = if !selection.is_empty() { selection.range() } else if let Some((_, ancestor_range)) = buffer.syntax_ancestor(selection.start..selection.end) @@ -14778,48 +14858,52 @@ impl Editor { selection.range() }; - let mut new_range = selection_range.clone(); - while let Some((_, ancestor_range)) = buffer.syntax_ancestor(new_range.clone()) { - new_range = match ancestor_range { + let mut parent = child.clone(); + while let Some((_, ancestor_range)) = buffer.syntax_ancestor(parent.clone()) { + parent = match ancestor_range { MultiOrSingleBufferOffsetRange::Single(range) => range, MultiOrSingleBufferOffsetRange::Multi(range) => range, }; - if new_range.start < selection_range.start - || new_range.end > selection_range.end - { + if parent.start < child.start || parent.end > child.end { break; } } - (selection, selection_range, new_range) + if parent == child { + return None; + } + let text = buffer.text_for_range(child).collect::(); + Some((selection.id, parent, text)) }) .collect::>(); - self.transact(window, cx, |editor, window, cx| { - for (_, child, parent) in &edits { - let text = buffer.text_for_range(child.clone()).collect::(); - editor.replace_text_in_range(Some(parent.clone()), &text, window, cx); - } - - editor.change_selections( - SelectionEffects::scroll(Autoscroll::fit()), - window, - cx, - |s| { - s.select( - edits - .iter() - .map(|(s, old, new)| Selection { - id: s.id, - start: new.start, - end: new.start + old.len(), - goal: SelectionGoal::None, - reversed: s.reversed, - }) - .collect(), - ); - }, - ); + self.transact(window, cx, |this, window, cx| { + this.buffer.update(cx, |buffer, cx| { + buffer.edit( + full_edits + .iter() + .map(|(_, p, t)| (p.clone(), t.clone())) + .collect::>(), + None, + cx, + ); + }); + this.change_selections(Default::default(), window, cx, |s| { + let mut offset = 0; + let mut selections = vec![]; + for (id, parent, text) in full_edits { + let start = parent.start - offset; + offset += parent.len() - text.len(); + selections.push(Selection { + id, + start, + end: start + text.len(), + reversed: false, + goal: Default::default(), + }); + } + s.select(selections); + }); }); } @@ -14828,7 +14912,7 @@ impl Editor { self.clear_tasks(); return Task::ready(()); } - let project = self.project.as_ref().map(Entity::downgrade); + let project = self.project().map(Entity::downgrade); let task_sources = self.lsp_task_sources(cx); let multi_buffer = self.buffer.downgrade(); cx.spawn_in(window, async move |editor, cx| { @@ -14843,10 +14927,7 @@ impl Editor { }; let hide_runnables = project - .update(cx, |project, cx| { - // Do not display any test indicators in non-dev server remote projects. 
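
A standalone sketch, using plain byte ranges instead of the editor's anchors, of the offset bookkeeping in the reworked syntax-node edit above: each replacement shrinks the text, so later selections shift left by the shrinkage accumulated so far.

fn main() {
    // (range being replaced in the old text, replacement text); ranges are sorted
    // by start and each replacement is no longer than what it replaces, as in the
    // hunk above where the child's text sits inside its syntax ancestor.
    let edits: Vec<(std::ops::Range<usize>, &str)> = vec![(2..10, "abc"), (15..20, "x")];

    let mut offset = 0;
    let mut selections = Vec::new();
    for (parent, text) in &edits {
        let start = parent.start - offset; // shift by shrinkage from earlier edits
        offset += parent.len() - text.len(); // this edit removes len(parent) - len(text) bytes
        selections.push(start..start + text.len()); // select the newly inserted text
    }

    assert_eq!(selections, vec![2..5, 10..11]);
}
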
- project.is_via_collab() && project.ssh_connection_string(cx).is_none() - }) + .update(cx, |project, _| project.is_via_collab()) .unwrap_or(true); if hide_runnables { return; @@ -15239,17 +15320,15 @@ impl Editor { if direction == ExpandExcerptDirection::Down { let multi_buffer = self.buffer.read(cx); let snapshot = multi_buffer.snapshot(cx); - if let Some(buffer_id) = snapshot.buffer_id_for_excerpt(excerpt) { - if let Some(buffer) = multi_buffer.buffer(buffer_id) { - if let Some(excerpt_range) = snapshot.buffer_range_for_excerpt(excerpt) { - let buffer_snapshot = buffer.read(cx).snapshot(); - let excerpt_end_row = - Point::from_anchor(&excerpt_range.end, &buffer_snapshot).row; - let last_row = buffer_snapshot.max_point().row; - let lines_below = last_row.saturating_sub(excerpt_end_row); - should_scroll_up = lines_below >= lines_to_expand; - } - } + if let Some(buffer_id) = snapshot.buffer_id_for_excerpt(excerpt) + && let Some(buffer) = multi_buffer.buffer(buffer_id) + && let Some(excerpt_range) = snapshot.buffer_range_for_excerpt(excerpt) + { + let buffer_snapshot = buffer.read(cx).snapshot(); + let excerpt_end_row = Point::from_anchor(&excerpt_range.end, &buffer_snapshot).row; + let last_row = buffer_snapshot.max_point().row; + let lines_below = last_row.saturating_sub(excerpt_end_row); + should_scroll_up = lines_below >= lines_to_expand; } } @@ -15334,10 +15413,10 @@ impl Editor { let selection = self.selections.newest::(cx); let mut active_group_id = None; - if let ActiveDiagnostic::Group(active_group) = &self.active_diagnostics { - if active_group.active_range.start.to_offset(&buffer) == selection.start { - active_group_id = Some(active_group.group_id); - } + if let ActiveDiagnostic::Group(active_group) = &self.active_diagnostics + && active_group.active_range.start.to_offset(&buffer) == selection.start + { + active_group_id = Some(active_group.group_id); } fn filtered( @@ -15396,7 +15475,8 @@ impl Editor { return; }; - let Some(buffer_id) = buffer.anchor_after(next_diagnostic.range.start).buffer_id else { + let next_diagnostic_start = buffer.anchor_after(next_diagnostic.range.start); + let Some(buffer_id) = buffer.buffer_id_for_anchor(next_diagnostic_start) else { return; }; self.change_selections(Default::default(), window, cx, |s| { @@ -15674,7 +15754,9 @@ impl Editor { }; cx.spawn_in(window, async move |editor, cx| { - let definitions = definitions.await?; + let Some(definitions) = definitions.await? 
else { + return Ok(Navigated::No); + }; let navigated = editor .update_in(cx, |editor, window, cx| { editor.navigate_to_hover_links( @@ -15817,19 +15899,30 @@ impl Editor { let tab_kind = match kind { Some(GotoDefinitionKind::Implementation) => "Implementations", - _ => "Definitions", + Some(GotoDefinitionKind::Symbol) | None => "Definitions", + Some(GotoDefinitionKind::Declaration) => "Declarations", + Some(GotoDefinitionKind::Type) => "Types", }; let title = editor .update_in(acx, |_, _, cx| { - let origin = locations.first().unwrap(); - let buffer = origin.buffer.read(cx); - format!( - "{} for {}", - tab_kind, - buffer - .text_for_range(origin.range.clone()) - .collect::() - ) + let target = locations + .iter() + .map(|location| { + location + .buffer + .read(cx) + .text_for_range(location.range.clone()) + .collect::() + }) + .filter(|text| !text.contains('\n')) + .unique() + .take(3) + .join(", "); + if target.is_empty() { + tab_kind.to_owned() + } else { + format!("{tab_kind} for {target}") + } }) .context("buffer title")?; @@ -15885,7 +15978,7 @@ impl Editor { if !split && Some(&target.buffer) == editor.buffer.read(cx).as_singleton().as_ref() { - editor.go_to_singleton_buffer_range(range.clone(), window, cx); + editor.go_to_singleton_buffer_range(range, window, cx); } else { window.defer(cx, move |window, cx| { let target_editor: Entity = @@ -15934,38 +16027,24 @@ impl Editor { cx.spawn_in(window, async move |editor, cx| { let location_task = editor.update(cx, |_, cx| { project.update(cx, |project, cx| { - let language_server_name = project - .language_server_statuses(cx) - .find(|(id, _)| server_id == *id) - .map(|(_, status)| status.name.clone()); - language_server_name.map(|language_server_name| { - project.open_local_buffer_via_lsp( - lsp_location.uri.clone(), - server_id, - language_server_name, - cx, - ) - }) + project.open_local_buffer_via_lsp(lsp_location.uri.clone(), server_id, cx) }) })?; - let location = match location_task { - Some(task) => Some({ - let target_buffer_handle = task.await.context("open local buffer")?; - let range = target_buffer_handle.read_with(cx, |target_buffer, _| { - let target_start = target_buffer - .clip_point_utf16(point_from_lsp(lsp_location.range.start), Bias::Left); - let target_end = target_buffer - .clip_point_utf16(point_from_lsp(lsp_location.range.end), Bias::Left); - target_buffer.anchor_after(target_start) - ..target_buffer.anchor_before(target_end) - })?; - Location { - buffer: target_buffer_handle, - range, - } - }), - None => None, - }; + let location = Some({ + let target_buffer_handle = location_task.await.context("open local buffer")?; + let range = target_buffer_handle.read_with(cx, |target_buffer, _| { + let target_start = target_buffer + .clip_point_utf16(point_from_lsp(lsp_location.range.start), Bias::Left); + let target_end = target_buffer + .clip_point_utf16(point_from_lsp(lsp_location.range.end), Bias::Left); + target_buffer.anchor_after(target_start) + ..target_buffer.anchor_before(target_end) + })?; + Location { + buffer: target_buffer_handle, + range, + } + }); Ok(location) }) } @@ -16019,25 +16098,32 @@ impl Editor { } }); - let locations = references.await?; + let Some(locations) = references.await? 
else { + return anyhow::Ok(Navigated::No); + }; if locations.is_empty() { return anyhow::Ok(Navigated::No); } workspace.update_in(cx, |workspace, window, cx| { - let title = locations - .first() - .as_ref() + let target = locations + .iter() .map(|location| { - let buffer = location.buffer.read(cx); - format!( - "References to `{}`", - buffer - .text_for_range(location.range.clone()) - .collect::() - ) + location + .buffer + .read(cx) + .text_for_range(location.range.clone()) + .collect::() }) - .unwrap(); + .filter(|text| !text.contains('\n')) + .unique() + .take(3) + .join(", "); + let title = if target.is_empty() { + "References".to_owned() + } else { + format!("References to {target}") + }; Self::open_locations_in_multibuffer( workspace, locations, @@ -16152,24 +16238,22 @@ impl Editor { let item_id = item.item_id(); if split { - workspace.split_item(SplitDirection::Right, item.clone(), window, cx); - } else { - if PreviewTabsSettings::get_global(cx).enable_preview_from_code_navigation { - let (preview_item_id, preview_item_idx) = - workspace.active_pane().read_with(cx, |pane, _| { - (pane.preview_item_id(), pane.preview_item_idx()) - }); + workspace.split_item(SplitDirection::Right, item, window, cx); + } else if PreviewTabsSettings::get_global(cx).enable_preview_from_code_navigation { + let (preview_item_id, preview_item_idx) = + workspace.active_pane().read_with(cx, |pane, _| { + (pane.preview_item_id(), pane.preview_item_idx()) + }); - workspace.add_item_to_active_pane(item.clone(), preview_item_idx, true, window, cx); + workspace.add_item_to_active_pane(item, preview_item_idx, true, window, cx); - if let Some(preview_item_id) = preview_item_id { - workspace.active_pane().update(cx, |pane, cx| { - pane.remove_item(preview_item_id, false, false, window, cx); - }); - } - } else { - workspace.add_item_to_active_pane(item.clone(), None, true, window, cx); + if let Some(preview_item_id) = preview_item_id { + workspace.active_pane().update(cx, |pane, cx| { + pane.remove_item(preview_item_id, false, false, window, cx); + }); } + } else { + workspace.add_item_to_active_pane(item, None, true, window, cx); } workspace.active_pane().update(cx, |pane, cx| { pane.set_preview_item_id(Some(item_id), cx); @@ -16559,10 +16643,7 @@ impl Editor { .transaction(transaction_id_prev) .map(|t| t.0.clone()) }) - .unwrap_or_else(|| { - log::info!("Failed to determine selections from before format. 
Falling back to selections when format was initiated"); - self.selections.disjoint_anchors() - }); + .unwrap_or_else(|| self.selections.disjoint_anchors()); let mut timeout = cx.background_executor().timer(FORMAT_TIMEOUT).fuse(); let format = project.update(cx, |project, cx| { @@ -16580,10 +16661,10 @@ impl Editor { buffer .update(cx, |buffer, cx| { - if let Some(transaction) = transaction { - if !buffer.is_singleton() { - buffer.push_transaction(&transaction.0, cx); - } + if let Some(transaction) = transaction + && !buffer.is_singleton() + { + buffer.push_transaction(&transaction.0, cx); } cx.notify(); }) @@ -16649,10 +16730,10 @@ impl Editor { buffer .update(cx, |buffer, cx| { // check if we need this - if let Some(transaction) = transaction { - if !buffer.is_singleton() { - buffer.push_transaction(&transaction.0, cx); - } + if let Some(transaction) = transaction + && !buffer.is_singleton() + { + buffer.push_transaction(&transaction.0, cx); } cx.notify(); }) @@ -17001,7 +17082,7 @@ impl Editor { if !pull_diagnostics_settings.enabled { return None; } - let project = self.project.as_ref()?.downgrade(); + let project = self.project()?.downgrade(); let debounce = Duration::from_millis(pull_diagnostics_settings.debounce_ms); let mut buffers = self.buffer.read(cx).all_buffers(); if let Some(buffer_id) = buffer_id { @@ -17284,12 +17365,12 @@ impl Editor { } for row in (0..=range.start.row).rev() { - if let Some(crease) = display_map.crease_for_buffer_row(MultiBufferRow(row)) { - if crease.range().end.row >= buffer_start_row { - to_fold.push(crease); - if row <= range.start.row { - break; - } + if let Some(crease) = display_map.crease_for_buffer_row(MultiBufferRow(row)) + && crease.range().end.row >= buffer_start_row + { + to_fold.push(crease); + if row <= range.start.row { + break; } } } @@ -17817,7 +17898,7 @@ impl Editor { ranges: &[Range], snapshot: &MultiBufferSnapshot, ) -> bool { - let mut hunks = self.diff_hunks_in_ranges(ranges, &snapshot); + let mut hunks = self.diff_hunks_in_ranges(ranges, snapshot); hunks.any(|hunk| hunk.status().has_secondary_hunk()) } @@ -17965,7 +18046,7 @@ impl Editor { hunks: impl Iterator, cx: &mut App, ) -> Option<()> { - let project = self.project.as_ref()?; + let project = self.project()?; let buffer = project.read(cx).buffer_for_id(buffer_id, cx)?; let diff = self.buffer.read(cx).diff_for(buffer_id)?; let buffer_snapshot = buffer.read(cx).snapshot(); @@ -18599,10 +18680,10 @@ impl Editor { pub fn working_directory(&self, cx: &App) -> Option { if let Some(buffer) = self.buffer().read(cx).as_singleton() { - if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) { - if let Some(dir) = file.abs_path(cx).parent() { - return Some(dir.to_owned()); - } + if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) + && let Some(dir) = file.abs_path(cx).parent() + { + return Some(dir.to_owned()); } if let Some(project_path) = buffer.read(cx).project_path(cx) { @@ -18625,7 +18706,7 @@ impl Editor { self.active_excerpt(cx).and_then(|(_, buffer, _)| { let buffer = buffer.read(cx); if let Some(project_path) = buffer.project_path(cx) { - let project = self.project.as_ref()?.read(cx); + let project = self.project()?.read(cx); project.absolute_path(&project_path, cx) } else { buffer @@ -18638,7 +18719,7 @@ impl Editor { fn target_file_path(&self, cx: &mut Context) -> Option { self.active_excerpt(cx).and_then(|(_, buffer, _)| { let project_path = buffer.read(cx).project_path(cx)?; - let project = self.project.as_ref()?.read(cx); + let project = 
self.project()?.read(cx); let entry = project.entry_for_path(&project_path, cx)?; let path = entry.path.to_path_buf(); Some(path) @@ -18662,10 +18743,10 @@ impl Editor { _window: &mut Window, cx: &mut Context, ) { - if let Some(path) = self.target_file_abs_path(cx) { - if let Some(path) = path.to_str() { - cx.write_to_clipboard(ClipboardItem::new_string(path.to_string())); - } + if let Some(path) = self.target_file_abs_path(cx) + && let Some(path) = path.to_str() + { + cx.write_to_clipboard(ClipboardItem::new_string(path.to_string())); } } @@ -18675,10 +18756,10 @@ impl Editor { _window: &mut Window, cx: &mut Context, ) { - if let Some(path) = self.target_file_path(cx) { - if let Some(path) = path.to_str() { - cx.write_to_clipboard(ClipboardItem::new_string(path.to_string())); - } + if let Some(path) = self.target_file_path(cx) + && let Some(path) = path.to_str() + { + cx.write_to_clipboard(ClipboardItem::new_string(path.to_string())); } } @@ -18747,22 +18828,20 @@ impl Editor { _: &mut Window, cx: &mut Context, ) { - if let Some(file) = self.target_file(cx) { - if let Some(file_stem) = file.path().file_stem() { - if let Some(name) = file_stem.to_str() { - cx.write_to_clipboard(ClipboardItem::new_string(name.to_string())); - } - } + if let Some(file) = self.target_file(cx) + && let Some(file_stem) = file.path().file_stem() + && let Some(name) = file_stem.to_str() + { + cx.write_to_clipboard(ClipboardItem::new_string(name.to_string())); } } pub fn copy_file_name(&mut self, _: &CopyFileName, _: &mut Window, cx: &mut Context) { - if let Some(file) = self.target_file(cx) { - if let Some(file_name) = file.path().file_name() { - if let Some(name) = file_name.to_str() { - cx.write_to_clipboard(ClipboardItem::new_string(name.to_string())); - } - } + if let Some(file) = self.target_file(cx) + && let Some(file_name) = file.path().file_name() + && let Some(name) = file_name.to_str() + { + cx.write_to_clipboard(ClipboardItem::new_string(name.to_string())); } } @@ -18859,7 +18938,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - if let Some(project) = self.project.as_ref() { + if let Some(project) = self.project() { let Some(buffer) = self.buffer().read(cx).as_singleton() else { return; }; @@ -18936,7 +19015,7 @@ impl Editor { fn has_blame_entries(&self, cx: &App) -> bool { self.blame() - .map_or(false, |blame| blame.read(cx).has_generated_entries()) + .is_some_and(|blame| blame.read(cx).has_generated_entries()) } fn newest_selection_head_on_empty_line(&self, cx: &App) -> bool { @@ -18963,19 +19042,16 @@ impl Editor { buffer_ranges.last() }?; - let selection = text::ToPoint::to_point(&range.start, &buffer).row - ..text::ToPoint::to_point(&range.end, &buffer).row; - Some(( - multi_buffer.buffer(buffer.remote_id()).unwrap().clone(), - selection, - )) + let selection = text::ToPoint::to_point(&range.start, buffer).row + ..text::ToPoint::to_point(&range.end, buffer).row; + Some((multi_buffer.buffer(buffer.remote_id()).unwrap(), selection)) }); let Some((buffer, selection)) = buffer_and_selection else { return Task::ready(Err(anyhow!("failed to determine buffer and selection"))); }; - let Some(project) = self.project.as_ref() else { + let Some(project) = self.project() else { return Task::ready(Err(anyhow!("editor does not have project"))); }; @@ -19032,10 +19108,10 @@ impl Editor { cx: &mut Context, ) { let selection = self.selections.newest::(cx).start.row + 1; - if let Some(file) = self.target_file(cx) { - if let Some(path) = file.path().to_str() { - 
cx.write_to_clipboard(ClipboardItem::new_string(format!("{path}:{selection}"))); - } + if let Some(file) = self.target_file(cx) + && let Some(path) = file.path().to_str() + { + cx.write_to_clipboard(ClipboardItem::new_string(format!("{path}:{selection}"))); } } @@ -19139,7 +19215,7 @@ impl Editor { let locations = self .selections .all_anchors(cx) - .into_iter() + .iter() .map(|selection| Location { buffer: buffer.clone(), range: selection.start.text_anchor..selection.end.text_anchor, @@ -19210,7 +19286,7 @@ impl Editor { row_highlights.insert( ix, RowHighlight { - range: range.clone(), + range, index, color, options, @@ -19586,7 +19662,7 @@ impl Editor { pub fn has_background_highlights(&self) -> bool { self.background_highlights .get(&HighlightKey::Type(TypeId::of::())) - .map_or(false, |(_, highlights)| !highlights.is_empty()) + .is_some_and(|(_, highlights)| !highlights.is_empty()) } pub fn background_highlights_in_range( @@ -19675,10 +19751,10 @@ impl Editor { break; } let end = range.end.to_point(&display_snapshot.buffer_snapshot); - if let Some(current_row) = &end_row { - if end.row == current_row.row { - continue; - } + if let Some(current_row) = &end_row + && end.row == current_row.row + { + continue; } let start = range.start.to_point(&display_snapshot.buffer_snapshot); if start_row.is_none() { @@ -19851,11 +19927,8 @@ impl Editor { event: &SessionEvent, cx: &mut Context, ) { - match event { - SessionEvent::InvalidateInlineValue => { - self.refresh_inline_values(cx); - } - _ => {} + if let SessionEvent::InvalidateInlineValue = event { + self.refresh_inline_values(cx); } } @@ -19970,17 +20043,16 @@ impl Editor { if self.has_active_edit_prediction() { self.update_visible_edit_prediction(window, cx); } - if let Some(project) = self.project.as_ref() { - if let Some(edited_buffer) = edited_buffer { - project.update(cx, |project, cx| { - self.registered_buffers - .entry(edited_buffer.read(cx).remote_id()) - .or_insert_with(|| { - project - .register_buffer_with_language_servers(&edited_buffer, cx) - }); - }); - } + if let Some(project) = self.project.as_ref() + && let Some(edited_buffer) = edited_buffer + { + project.update(cx, |project, cx| { + self.registered_buffers + .entry(edited_buffer.read(cx).remote_id()) + .or_insert_with(|| { + project.register_buffer_with_language_servers(edited_buffer, cx) + }); + }); } cx.emit(EditorEvent::BufferEdited); cx.emit(SearchEvent::MatchesInvalidated); @@ -19990,10 +20062,10 @@ impl Editor { } if *singleton_buffer_edited { - if let Some(buffer) = edited_buffer { - if buffer.read(cx).file().is_none() { - cx.emit(EditorEvent::TitleChanged); - } + if let Some(buffer) = edited_buffer + && buffer.read(cx).file().is_none() + { + cx.emit(EditorEvent::TitleChanged); } if let Some(project) = &self.project { #[allow(clippy::mutable_key_type)] @@ -20039,17 +20111,17 @@ impl Editor { } => { self.tasks_update_task = Some(self.refresh_runnables(window, cx)); let buffer_id = buffer.read(cx).remote_id(); - if self.buffer.read(cx).diff_for(buffer_id).is_none() { - if let Some(project) = &self.project { - update_uncommitted_diff_for_buffer( - cx.entity(), - project, - [buffer.clone()], - self.buffer.clone(), - cx, - ) - .detach(); - } + if self.buffer.read(cx).diff_for(buffer_id).is_none() + && let Some(project) = &self.project + { + update_uncommitted_diff_for_buffer( + cx.entity(), + project, + [buffer.clone()], + self.buffer.clone(), + cx, + ) + .detach(); } self.update_lsp_data(false, Some(buffer_id), window, cx); cx.emit(EditorEvent::ExcerptsAdded { @@ 
-20182,6 +20254,7 @@ impl Editor { ); let old_cursor_shape = self.cursor_shape; + let old_show_breadcrumbs = self.show_breadcrumbs; { let editor_settings = EditorSettings::get_global(cx); @@ -20195,6 +20268,10 @@ impl Editor { cx.emit(EditorEvent::CursorShapeChanged); } + if old_show_breadcrumbs != self.show_breadcrumbs { + cx.emit(EditorEvent::BreadcrumbsChanged); + } + let project_settings = ProjectSettings::get_global(cx); self.serialize_dirty_buffers = !self.mode.is_minimap() && project_settings.session.restore_unsaved_buffers; @@ -20392,11 +20469,8 @@ impl Editor { .range_to_buffer_ranges_with_deleted_hunks(selection.range()) { if let Some(anchor) = anchor { - // selection is in a deleted hunk - let Some(buffer_id) = anchor.buffer_id else { - continue; - }; - let Some(buffer_handle) = multi_buffer.buffer(buffer_id) else { + let Some(buffer_handle) = multi_buffer.buffer_for_anchor(anchor, cx) + else { continue; }; let offset = text::ToOffset::to_offset( @@ -20504,7 +20578,7 @@ impl Editor { // For now, don't allow opening excerpts in buffers that aren't backed by // regular project files. fn can_open_excerpts_in_file(file: Option<&Arc>) -> bool { - file.map_or(true, |file| project::File::from_dyn(Some(file)).is_some()) + file.is_none_or(|file| project::File::from_dyn(Some(file)).is_some()) } fn marked_text_ranges(&self, cx: &App) -> Option>> { @@ -20547,7 +20621,7 @@ impl Editor { fn report_editor_event( &self, - event_type: &'static str, + reported_event: ReportEditorEvent, file_extension: Option, cx: &App, ) { @@ -20581,15 +20655,30 @@ impl Editor { .show_edit_predictions; let project = project.read(cx); - telemetry::event!( - event_type, - file_extension, - vim_mode, - copilot_enabled, - copilot_enabled_for_language, - edit_predictions_provider, - is_via_ssh = project.is_via_ssh(), - ); + let event_type = reported_event.event_type(); + + if let ReportEditorEvent::Saved { auto_saved } = reported_event { + telemetry::event!( + event_type, + type = if auto_saved {"autosave"} else {"manual"}, + file_extension, + vim_mode, + copilot_enabled, + copilot_enabled_for_language, + edit_predictions_provider, + is_via_ssh = project.is_via_ssh(), + ); + } else { + telemetry::event!( + event_type, + file_extension, + vim_mode, + copilot_enabled, + copilot_enabled_for_language, + edit_predictions_provider, + is_via_ssh = project.is_via_ssh(), + ); + }; } /// Copy the highlighted chunks to the clipboard as JSON. 
The format is an array of lines, @@ -20633,11 +20722,11 @@ impl Editor { let mut chunk_lines = chunk.text.split('\n').peekable(); while let Some(text) = chunk_lines.next() { let mut merged_with_last_token = false; - if let Some(last_token) = line.back_mut() { - if last_token.highlight == highlight { - last_token.text.push_str(text); - merged_with_last_token = true; - } + if let Some(last_token) = line.back_mut() + && last_token.highlight == highlight + { + last_token.text.push_str(text); + merged_with_last_token = true; } if !merged_with_last_token { @@ -20827,7 +20916,7 @@ impl Editor { let existing_pending = self .text_highlights::(cx) - .map(|(_, ranges)| ranges.iter().cloned().collect::>()); + .map(|(_, ranges)| ranges.to_vec()); if existing_pending.is_none() && pending.is_empty() { return; } @@ -20942,7 +21031,7 @@ impl Editor { cx: &mut Context, ) { let workspace = self.workspace(); - let project = self.project.as_ref(); + let project = self.project(); let save_tasks = self.buffer().update(cx, |multi_buffer, cx| { let mut tasks = Vec::new(); for (buffer_id, changes) in revert_changes { @@ -20980,7 +21069,7 @@ impl Editor { }; if let Some((workspace, path)) = workspace.as_ref().zip(path) { let Some(task) = cx - .update_window_entity(&workspace, |workspace, window, cx| { + .update_window_entity(workspace, |workspace, window, cx| { workspace .open_path_preview(path, None, false, false, false, window, cx) }) @@ -21032,7 +21121,7 @@ impl Editor { pub fn has_visible_completions_menu(&self) -> bool { !self.edit_prediction_preview_is_active() - && self.context_menu.borrow().as_ref().map_or(false, |menu| { + && self.context_menu.borrow().as_ref().is_some_and(|menu| { menu.visible() && matches!(menu, CodeContextMenu::Completions(_)) }) } @@ -21096,39 +21185,37 @@ impl Editor { { let buffer_snapshot = OnceCell::new(); - if let Some(folds) = DB.get_editor_folds(item_id, workspace_id).log_err() { - if !folds.is_empty() { - let snapshot = - buffer_snapshot.get_or_init(|| self.buffer.read(cx).snapshot(cx)); - self.fold_ranges( - folds - .into_iter() - .map(|(start, end)| { - snapshot.clip_offset(start, Bias::Left) - ..snapshot.clip_offset(end, Bias::Right) - }) - .collect(), - false, - window, - cx, - ); - } - } - - if let Some(selections) = DB.get_editor_selections(item_id, workspace_id).log_err() { - if !selections.is_empty() { - let snapshot = - buffer_snapshot.get_or_init(|| self.buffer.read(cx).snapshot(cx)); - // skip adding the initial selection to selection history - self.selection_history.mode = SelectionHistoryMode::Skipping; - self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges(selections.into_iter().map(|(start, end)| { + if let Some(folds) = DB.get_editor_folds(item_id, workspace_id).log_err() + && !folds.is_empty() + { + let snapshot = buffer_snapshot.get_or_init(|| self.buffer.read(cx).snapshot(cx)); + self.fold_ranges( + folds + .into_iter() + .map(|(start, end)| { snapshot.clip_offset(start, Bias::Left) ..snapshot.clip_offset(end, Bias::Right) - })); - }); - self.selection_history.mode = SelectionHistoryMode::Normal; - } + }) + .collect(), + false, + window, + cx, + ); + } + + if let Some(selections) = DB.get_editor_selections(item_id, workspace_id).log_err() + && !selections.is_empty() + { + let snapshot = buffer_snapshot.get_or_init(|| self.buffer.read(cx).snapshot(cx)); + // skip adding the initial selection to selection history + self.selection_history.mode = SelectionHistoryMode::Skipping; + 
self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges(selections.into_iter().map(|(start, end)| { + snapshot.clip_offset(start, Bias::Left) + ..snapshot.clip_offset(end, Bias::Right) + })); + }); + self.selection_history.mode = SelectionHistoryMode::Normal; }; } @@ -21170,17 +21257,15 @@ fn process_completion_for_edit( let mut snippet_source = completion.new_text.clone(); let mut previous_point = text::ToPoint::to_point(cursor_position, buffer); previous_point.column = previous_point.column.saturating_sub(1); - if let Some(scope) = buffer_snapshot.language_scope_at(previous_point) { - if scope.prefers_label_for_snippet_in_completion() { - if let Some(label) = completion.label() { - if matches!( - completion.kind(), - Some(CompletionItemKind::FUNCTION) | Some(CompletionItemKind::METHOD) - ) { - snippet_source = label; - } - } - } + if let Some(scope) = buffer_snapshot.language_scope_at(previous_point) + && scope.prefers_label_for_snippet_in_completion() + && let Some(label) = completion.label() + && matches!( + completion.kind(), + Some(CompletionItemKind::FUNCTION) | Some(CompletionItemKind::METHOD) + ) + { + snippet_source = label; } match Snippet::parse(&snippet_source).log_err() { Some(parsed_snippet) => (Some(parsed_snippet.clone()), parsed_snippet.text), @@ -21204,14 +21289,14 @@ fn process_completion_for_edit( debug_assert!( insert_range .start - .cmp(&cursor_position, &buffer_snapshot) + .cmp(cursor_position, &buffer_snapshot) .is_le(), "insert_range should start before or at cursor position" ); debug_assert!( replace_range .start - .cmp(&cursor_position, &buffer_snapshot) + .cmp(cursor_position, &buffer_snapshot) .is_le(), "replace_range should start before or at cursor position" ); @@ -21234,10 +21319,10 @@ fn process_completion_for_edit( ); let mut current_needle = text_to_replace.next(); for haystack_ch in completion.label.text.chars() { - if let Some(needle_ch) = current_needle { - if haystack_ch.eq_ignore_ascii_case(&needle_ch) { - current_needle = text_to_replace.next(); - } + if let Some(needle_ch) = current_needle + && haystack_ch.eq_ignore_ascii_case(&needle_ch) + { + current_needle = text_to_replace.next(); } } current_needle.is_none() @@ -21245,7 +21330,7 @@ fn process_completion_for_edit( LspInsertMode::ReplaceSuffix => { if replace_range .end - .cmp(&cursor_position, &buffer_snapshot) + .cmp(cursor_position, &buffer_snapshot) .is_gt() { let range_after_cursor = *cursor_position..replace_range.end; @@ -21281,7 +21366,7 @@ fn process_completion_for_edit( if range_to_replace .end - .cmp(&cursor_position, &buffer_snapshot) + .cmp(cursor_position, &buffer_snapshot) .is_lt() { range_to_replace.end = *cursor_position; @@ -21289,7 +21374,7 @@ fn process_completion_for_edit( CompletionEdit { new_text, - replace_range: range_to_replace.to_offset(&buffer), + replace_range: range_to_replace.to_offset(buffer), snippet, } } @@ -21459,9 +21544,9 @@ fn is_grapheme_whitespace(text: &str) -> bool { } fn should_stay_with_preceding_ideograph(text: &str) -> bool { - text.chars().next().map_or(false, |ch| { - matches!(ch, '。' | '、' | ',' | '?' | '!' | ':' | ';' | '…') - }) + text.chars() + .next() + .is_some_and(|ch| matches!(ch, '。' | '、' | ',' | '?' | '!' 
| ':' | ';' | '…')) } #[derive(PartialEq, Eq, Debug, Clone, Copy)] @@ -21491,20 +21576,20 @@ impl<'a> Iterator for WordBreakingTokenizer<'a> { offset += first_grapheme.len(); grapheme_len += 1; if is_grapheme_ideographic(first_grapheme) && !is_whitespace { - if let Some(grapheme) = iter.peek().copied() { - if should_stay_with_preceding_ideograph(grapheme) { - offset += grapheme.len(); - grapheme_len += 1; - } + if let Some(grapheme) = iter.peek().copied() + && should_stay_with_preceding_ideograph(grapheme) + { + offset += grapheme.len(); + grapheme_len += 1; } } else { let mut words = self.input[offset..].split_word_bound_indices().peekable(); let mut next_word_bound = words.peek().copied(); - if next_word_bound.map_or(false, |(i, _)| i == 0) { + if next_word_bound.is_some_and(|(i, _)| i == 0) { next_word_bound = words.next(); } while let Some(grapheme) = iter.peek().copied() { - if next_word_bound.map_or(false, |(i, _)| i == offset) { + if next_word_bound.is_some_and(|(i, _)| i == offset) { break; }; if is_grapheme_whitespace(grapheme) != is_whitespace @@ -21625,7 +21710,7 @@ fn wrap_with_prefix( let subsequent_lines_prefix_len = char_len_with_expanded_tabs(0, &subsequent_lines_prefix, tab_size); let mut wrapped_text = String::new(); - let mut current_line = first_line_prefix.clone(); + let mut current_line = first_line_prefix; let mut is_first_line = true; let tokenizer = WordBreakingTokenizer::new(&unwrapped_text); @@ -21797,7 +21882,7 @@ pub trait SemanticsProvider { buffer: &Entity, position: text::Anchor, cx: &mut App, - ) -> Option>>; + ) -> Option>>>; fn inline_values( &self, @@ -21836,7 +21921,7 @@ pub trait SemanticsProvider { position: text::Anchor, kind: GotoDefinitionKind, cx: &mut App, - ) -> Option>>>; + ) -> Option>>>>; fn range_for_rename( &self, @@ -21949,7 +22034,13 @@ impl CodeActionProvider for Entity { Ok(code_lens_actions .context("code lens fetch")? .into_iter() - .chain(code_actions.context("code action fetch")?) + .flatten() + .chain( + code_actions + .context("code action fetch")? 
+ .into_iter() + .flatten(), + ) .collect()) }) }) @@ -22038,7 +22129,7 @@ fn snippet_completions( snippet .prefix .iter() - .map(move |prefix| StringMatchCandidate::new(ix, &prefix)) + .map(move |prefix| StringMatchCandidate::new(ix, prefix)) }) .collect::>(); @@ -22244,7 +22335,7 @@ impl SemanticsProvider for Entity { buffer: &Entity, position: text::Anchor, cx: &mut App, - ) -> Option>> { + ) -> Option>>> { Some(self.update(cx, |project, cx| project.hover(buffer, position, cx))) } @@ -22265,12 +22356,12 @@ impl SemanticsProvider for Entity { position: text::Anchor, kind: GotoDefinitionKind, cx: &mut App, - ) -> Option>>> { + ) -> Option>>>> { Some(self.update(cx, |project, cx| match kind { - GotoDefinitionKind::Symbol => project.definitions(&buffer, position, cx), - GotoDefinitionKind::Declaration => project.declarations(&buffer, position, cx), - GotoDefinitionKind::Type => project.type_definitions(&buffer, position, cx), - GotoDefinitionKind::Implementation => project.implementations(&buffer, position, cx), + GotoDefinitionKind::Symbol => project.definitions(buffer, position, cx), + GotoDefinitionKind::Declaration => project.declarations(buffer, position, cx), + GotoDefinitionKind::Type => project.type_definitions(buffer, position, cx), + GotoDefinitionKind::Implementation => project.implementations(buffer, position, cx), })) } @@ -22801,6 +22892,7 @@ pub enum EditorEvent { }, Reloaded, CursorShapeChanged, + BreadcrumbsChanged, PushedToNavHistory { anchor: Anchor, is_deactivate: bool, @@ -22820,7 +22912,7 @@ impl Render for Editor { let settings = ThemeSettings::get_global(cx); let mut text_style = match self.mode { - EditorMode::SingleLine { .. } | EditorMode::AutoHeight { .. } => TextStyle { + EditorMode::SingleLine | EditorMode::AutoHeight { .. } => TextStyle { color: cx.theme().colors().editor_foreground, font_family: settings.ui_font.family.clone(), font_features: settings.ui_font.features.clone(), @@ -22846,7 +22938,7 @@ impl Render for Editor { } let background = match self.mode { - EditorMode::SingleLine { .. } => cx.theme().system().transparent, + EditorMode::SingleLine => cx.theme().system().transparent, EditorMode::AutoHeight { .. } => cx.theme().system().transparent, EditorMode::Full { .. } => cx.theme().colors().editor_background, EditorMode::Minimap { .. } => cx.theme().colors().editor_background.opacity(0.7), @@ -23678,7 +23770,7 @@ fn all_edits_insertions_or_deletions( let mut all_deletions = true; for (range, new_text) in edits.iter() { - let range_is_empty = range.to_offset(&snapshot).is_empty(); + let range_is_empty = range.to_offset(snapshot).is_empty(); let text_is_empty = new_text.is_empty(); if range_is_empty != text_is_empty { diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index 3d132651b846c3654b022b4a3e0aa6f60fa25d04..1d7e04cae021dd7b755f1f80e78fd3ea83197539 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -132,6 +132,10 @@ pub struct StatusBar { /// /// Default: true pub active_language_button: bool, + /// Whether to show the cursor position button in the status bar. + /// + /// Default: true + pub cursor_position_button: bool, } #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] @@ -585,6 +589,10 @@ pub struct StatusBarContent { /// /// Default: true pub active_language_button: Option, + /// Whether to show the cursor position button in the status bar. 
+ /// + /// Default: true + pub cursor_position_button: Option, } // Toolbar related settings @@ -802,10 +810,8 @@ impl Settings for EditorSettings { if gutter.line_numbers.is_some() { old_gutter.line_numbers = gutter.line_numbers } - } else { - if gutter != GutterContent::default() { - current.gutter = Some(gutter) - } + } else if gutter != GutterContent::default() { + current.gutter = Some(gutter) } if let Some(b) = vscode.read_bool("editor.scrollBeyondLastLine") { current.scroll_beyond_last_line = Some(if b { diff --git a/crates/editor/src/editor_settings_controls.rs b/crates/editor/src/editor_settings_controls.rs index dc5557b05277da972ea36ba43ffdf08a565edda9..91022d94a8843a2e9b7e9c77137d4d2ba57bfa7f 100644 --- a/crates/editor/src/editor_settings_controls.rs +++ b/crates/editor/src/editor_settings_controls.rs @@ -88,7 +88,7 @@ impl RenderOnce for BufferFontFamilyControl { .child(Icon::new(IconName::Font)) .child(DropdownMenu::new( "buffer-font-family", - value.clone(), + value, ContextMenu::build(window, cx, |mut menu, _, cx| { let font_family_cache = FontFamilyCache::global(cx); diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index b31963c9c8c694acebf072e05f693f36a81185af..96261fdb2cd82a31b6e0787b738e514c74d7c5aa 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -74,7 +74,7 @@ fn test_edit_events(cx: &mut TestAppContext) { let editor1 = cx.add_window({ let events = events.clone(); |window, cx| { - let entity = cx.entity().clone(); + let entity = cx.entity(); cx.subscribe_in( &entity, window, @@ -95,7 +95,7 @@ fn test_edit_events(cx: &mut TestAppContext) { let events = events.clone(); |window, cx| { cx.subscribe_in( - &cx.entity().clone(), + &cx.entity(), window, move |_, _, event: &EditorEvent, _, _| match event { EditorEvent::Edited { .. 
} => events.borrow_mut().push(("editor2", "edited")), @@ -708,7 +708,7 @@ async fn test_navigation_history(cx: &mut TestAppContext) { _ = workspace.update(cx, |_v, window, cx| { cx.new(|cx| { let buffer = MultiBuffer::build_simple(&sample_text(300, 5, 'a'), cx); - let mut editor = build_editor(buffer.clone(), window, cx); + let mut editor = build_editor(buffer, window, cx); let handle = cx.entity(); editor.set_nav_history(Some(pane.read(cx).nav_history_for_item(&handle))); @@ -898,7 +898,7 @@ fn test_fold_action(cx: &mut TestAppContext) { .unindent(), cx, ); - build_editor(buffer.clone(), window, cx) + build_editor(buffer, window, cx) }); _ = editor.update(cx, |editor, window, cx| { @@ -989,7 +989,7 @@ fn test_fold_action_whitespace_sensitive_language(cx: &mut TestAppContext) { .unindent(), cx, ); - build_editor(buffer.clone(), window, cx) + build_editor(buffer, window, cx) }); _ = editor.update(cx, |editor, window, cx| { @@ -1074,7 +1074,7 @@ fn test_fold_action_multiple_line_breaks(cx: &mut TestAppContext) { .unindent(), cx, ); - build_editor(buffer.clone(), window, cx) + build_editor(buffer, window, cx) }); _ = editor.update(cx, |editor, window, cx| { @@ -1173,7 +1173,7 @@ fn test_fold_at_level(cx: &mut TestAppContext) { .unindent(), cx, ); - build_editor(buffer.clone(), window, cx) + build_editor(buffer, window, cx) }); _ = editor.update(cx, |editor, window, cx| { @@ -1335,7 +1335,7 @@ fn test_move_cursor_multibyte(cx: &mut TestAppContext) { let editor = cx.add_window(|window, cx| { let buffer = MultiBuffer::build_simple("🟥🟧🟨🟩🟦🟪\nabcde\nαβγδε", cx); - build_editor(buffer.clone(), window, cx) + build_editor(buffer, window, cx) }); assert_eq!('🟥'.len_utf8(), 4); @@ -1452,7 +1452,7 @@ fn test_move_cursor_different_line_lengths(cx: &mut TestAppContext) { let editor = cx.add_window(|window, cx| { let buffer = MultiBuffer::build_simple("ⓐⓑⓒⓓⓔ\nabcd\nαβγ\nabcd\nⓐⓑⓒⓓⓔ\n", cx); - build_editor(buffer.clone(), window, cx) + build_editor(buffer, window, cx) }); _ = editor.update(cx, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { @@ -1901,6 +1901,51 @@ fn test_beginning_of_line_stop_at_indent(cx: &mut TestAppContext) { }); } +#[gpui::test] +fn test_beginning_of_line_with_cursor_between_line_start_and_indent(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let move_to_beg = MoveToBeginningOfLine { + stop_at_soft_wraps: true, + stop_at_indent: true, + }; + + let editor = cx.add_window(|window, cx| { + let buffer = MultiBuffer::build_simple(" hello\nworld", cx); + build_editor(buffer, window, cx) + }); + + _ = editor.update(cx, |editor, window, cx| { + // test cursor between line_start and indent_start + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(0), 3)..DisplayPoint::new(DisplayRow(0), 3) + ]); + }); + + // cursor should move to line_start + editor.move_to_beginning_of_line(&move_to_beg, window, cx); + assert_eq!( + editor.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0)] + ); + + // cursor should move to indent_start + editor.move_to_beginning_of_line(&move_to_beg, window, cx); + assert_eq!( + editor.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(0), 4)..DisplayPoint::new(DisplayRow(0), 4)] + ); + + // cursor should move to back to line_start + editor.move_to_beginning_of_line(&move_to_beg, window, cx); + assert_eq!( + editor.selections.display_ranges(cx), + 
&[DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0)] + ); + }); +} + #[gpui::test] fn test_prev_next_word_boundary(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -2434,7 +2479,7 @@ fn test_delete_to_word_boundary(cx: &mut TestAppContext) { let editor = cx.add_window(|window, cx| { let buffer = MultiBuffer::build_simple("one two three four", cx); - build_editor(buffer.clone(), window, cx) + build_editor(buffer, window, cx) }); _ = editor.update(cx, |editor, window, cx| { @@ -2482,7 +2527,7 @@ fn test_delete_to_previous_word_start_or_newline(cx: &mut TestAppContext) { let editor = cx.add_window(|window, cx| { let buffer = MultiBuffer::build_simple("one\n2\nthree\n4", cx); - build_editor(buffer.clone(), window, cx) + build_editor(buffer, window, cx) }); let del_to_prev_word_start = DeleteToPreviousWordStart { ignore_newlines: false, @@ -2518,7 +2563,7 @@ fn test_delete_to_next_word_end_or_newline(cx: &mut TestAppContext) { let editor = cx.add_window(|window, cx| { let buffer = MultiBuffer::build_simple("\none\n two\nthree\n four", cx); - build_editor(buffer.clone(), window, cx) + build_editor(buffer, window, cx) }); let del_to_next_word_end = DeleteToNextWordEnd { ignore_newlines: false, @@ -2563,7 +2608,7 @@ fn test_newline(cx: &mut TestAppContext) { let editor = cx.add_window(|window, cx| { let buffer = MultiBuffer::build_simple("aaaa\n bbbb\n", cx); - build_editor(buffer.clone(), window, cx) + build_editor(buffer, window, cx) }); _ = editor.update(cx, |editor, window, cx| { @@ -2599,7 +2644,7 @@ fn test_newline_with_old_selections(cx: &mut TestAppContext) { .as_str(), cx, ); - let mut editor = build_editor(buffer.clone(), window, cx); + let mut editor = build_editor(buffer, window, cx); editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { s.select_ranges([ Point::new(2, 4)..Point::new(2, 5), @@ -3130,7 +3175,7 @@ fn test_insert_with_old_selections(cx: &mut TestAppContext) { let editor = cx.add_window(|window, cx| { let buffer = MultiBuffer::build_simple("a( X ), b( Y ), c( Z )", cx); - let mut editor = build_editor(buffer.clone(), window, cx); + let mut editor = build_editor(buffer, window, cx); editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { s.select_ranges([3..4, 11..12, 19..20]) }); @@ -5517,7 +5562,7 @@ async fn test_rewrap(cx: &mut TestAppContext) { # ˇThis is a long comment using a pound # sign. 
"}, - python_language.clone(), + python_language, &mut cx, ); @@ -5624,7 +5669,7 @@ async fn test_rewrap(cx: &mut TestAppContext) { also very long and should not merge with the numbered item.ˇ» "}, - markdown_language.clone(), + markdown_language, &mut cx, ); @@ -5655,7 +5700,7 @@ async fn test_rewrap(cx: &mut TestAppContext) { // This is the second long comment block // to be wrapped.ˇ» "}, - rust_language.clone(), + rust_language, &mut cx, ); @@ -5678,7 +5723,7 @@ async fn test_rewrap(cx: &mut TestAppContext) { «\tThis is a very long indented line \tthat will be wrapped.ˇ» "}, - plaintext_language.clone(), + plaintext_language, &mut cx, ); @@ -6356,7 +6401,7 @@ async fn test_split_selection_into_lines(cx: &mut TestAppContext) { fn test(cx: &mut EditorTestContext, initial_state: &'static str, expected_state: &'static str) { cx.set_state(initial_state); cx.update_editor(|e, window, cx| { - e.split_selection_into_lines(&SplitSelectionIntoLines, window, cx) + e.split_selection_into_lines(&Default::default(), window, cx) }); cx.assert_editor_state(expected_state); } @@ -6444,7 +6489,7 @@ async fn test_split_selection_into_lines_interacting_with_creases(cx: &mut TestA DisplayPoint::new(DisplayRow(4), 4)..DisplayPoint::new(DisplayRow(4), 4), ]) }); - editor.split_selection_into_lines(&SplitSelectionIntoLines, window, cx); + editor.split_selection_into_lines(&Default::default(), window, cx); assert_eq!( editor.display_text(cx), "aaaaa\nbbbbb\nccc⋯eeee\nfffff\nggggg\n⋯i" @@ -6460,7 +6505,7 @@ async fn test_split_selection_into_lines_interacting_with_creases(cx: &mut TestA DisplayPoint::new(DisplayRow(5), 0)..DisplayPoint::new(DisplayRow(0), 1) ]) }); - editor.split_selection_into_lines(&SplitSelectionIntoLines, window, cx); + editor.split_selection_into_lines(&Default::default(), window, cx); assert_eq!( editor.display_text(cx), "aaaaa\nbbbbb\nccccc\nddddd\neeeee\nfffff\nggggg\nhhhhh\niiiii" @@ -7970,7 +8015,7 @@ async fn test_select_larger_smaller_syntax_node_for_string(cx: &mut TestAppConte } #[gpui::test] -async fn test_unwrap_syntax_node(cx: &mut gpui::TestAppContext) { +async fn test_unwrap_syntax_nodes(cx: &mut gpui::TestAppContext) { init_test(cx, |_| {}); let mut cx = EditorTestContext::new(cx).await; @@ -7984,21 +8029,12 @@ async fn test_unwrap_syntax_node(cx: &mut gpui::TestAppContext) { buffer.set_language(Some(language), cx); }); - cx.set_state( - &r#" - use mod1::mod2::{«mod3ˇ», mod4}; - "# - .unindent(), - ); + cx.set_state(indoc! { r#"use mod1::{mod2::{«mod3ˇ», mod4}, mod5::{mod6, «mod7ˇ»}};"# }); cx.update_editor(|editor, window, cx| { editor.unwrap_syntax_node(&UnwrapSyntaxNode, window, cx); }); - cx.assert_editor_state( - &r#" - use mod1::mod2::«mod3ˇ»; - "# - .unindent(), - ); + + cx.assert_editor_state(indoc! 
{ r#"use mod1::{mod2::«mod3ˇ», mod5::«mod7ˇ»};"# }); } #[gpui::test] @@ -8169,6 +8205,216 @@ async fn test_autoindent(cx: &mut TestAppContext) { }); } +#[gpui::test] +async fn test_autoindent_disabled(cx: &mut TestAppContext) { + init_test(cx, |settings| settings.defaults.auto_indent = Some(false)); + + let language = Arc::new( + Language::new( + LanguageConfig { + brackets: BracketPairConfig { + pairs: vec![ + BracketPair { + start: "{".to_string(), + end: "}".to_string(), + close: false, + surround: false, + newline: true, + }, + BracketPair { + start: "(".to_string(), + end: ")".to_string(), + close: false, + surround: false, + newline: true, + }, + ], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::LANGUAGE.into()), + ) + .with_indents_query( + r#" + (_ "(" ")" @end) @indent + (_ "{" "}" @end) @indent + "#, + ) + .unwrap(), + ); + + let text = "fn a() {}"; + + let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(language, cx)); + let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); + let (editor, cx) = cx.add_window_view(|window, cx| build_editor(buffer, window, cx)); + editor + .condition::(cx, |editor, cx| !editor.buffer.read(cx).is_parsing(cx)) + .await; + + editor.update_in(cx, |editor, window, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges([5..5, 8..8, 9..9]) + }); + editor.newline(&Newline, window, cx); + assert_eq!( + editor.text(cx), + indoc!( + " + fn a( + + ) { + + } + " + ) + ); + assert_eq!( + editor.selections.ranges(cx), + &[ + Point::new(1, 0)..Point::new(1, 0), + Point::new(3, 0)..Point::new(3, 0), + Point::new(5, 0)..Point::new(5, 0) + ] + ); + }); +} + +#[gpui::test] +async fn test_autoindent_disabled_with_nested_language(cx: &mut TestAppContext) { + init_test(cx, |settings| { + settings.defaults.auto_indent = Some(true); + settings.languages.0.insert( + "python".into(), + LanguageSettingsContent { + auto_indent: Some(false), + ..Default::default() + }, + ); + }); + + let mut cx = EditorTestContext::new(cx).await; + + let injected_language = Arc::new( + Language::new( + LanguageConfig { + brackets: BracketPairConfig { + pairs: vec![ + BracketPair { + start: "{".to_string(), + end: "}".to_string(), + close: false, + surround: false, + newline: true, + }, + BracketPair { + start: "(".to_string(), + end: ")".to_string(), + close: true, + surround: false, + newline: true, + }, + ], + ..Default::default() + }, + name: "python".into(), + ..Default::default() + }, + Some(tree_sitter_python::LANGUAGE.into()), + ) + .with_indents_query( + r#" + (_ "(" ")" @end) @indent + (_ "{" "}" @end) @indent + "#, + ) + .unwrap(), + ); + + let language = Arc::new( + Language::new( + LanguageConfig { + brackets: BracketPairConfig { + pairs: vec![ + BracketPair { + start: "{".to_string(), + end: "}".to_string(), + close: false, + surround: false, + newline: true, + }, + BracketPair { + start: "(".to_string(), + end: ")".to_string(), + close: true, + surround: false, + newline: true, + }, + ], + ..Default::default() + }, + name: LanguageName::new("rust"), + ..Default::default() + }, + Some(tree_sitter_rust::LANGUAGE.into()), + ) + .with_indents_query( + r#" + (_ "(" ")" @end) @indent + (_ "{" "}" @end) @indent + "#, + ) + .unwrap() + .with_injection_query( + r#" + (macro_invocation + macro: (identifier) @_macro_name + (token_tree) @injection.content + (#set! 
injection.language "python")) + "#, + ) + .unwrap(), + ); + + cx.language_registry().add(injected_language); + cx.language_registry().add(language.clone()); + + cx.update_buffer(|buffer, cx| { + buffer.set_language(Some(language), cx); + }); + + cx.set_state(r#"struct A {ˇ}"#); + + cx.update_editor(|editor, window, cx| { + editor.newline(&Default::default(), window, cx); + }); + + cx.assert_editor_state(indoc!( + "struct A { + ˇ + }" + )); + + cx.set_state(r#"select_biased!(ˇ)"#); + + cx.update_editor(|editor, window, cx| { + editor.newline(&Default::default(), window, cx); + editor.handle_input("def ", window, cx); + editor.handle_input("(", window, cx); + editor.newline(&Default::default(), window, cx); + editor.handle_input("a", window, cx); + }); + + cx.assert_editor_state(indoc!( + "select_biased!( + def ( + aˇ + ) + )" + )); +} + #[gpui::test] async fn test_autoindent_selections(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -8643,7 +8889,7 @@ async fn test_autoclose_with_embedded_language(cx: &mut TestAppContext) { )); cx.language_registry().add(html_language.clone()); - cx.language_registry().add(javascript_language.clone()); + cx.language_registry().add(javascript_language); cx.executor().run_until_parked(); cx.update_buffer(|buffer, cx| { @@ -9387,7 +9633,7 @@ async fn test_snippets(cx: &mut TestAppContext) { .selections .all(cx) .iter() - .map(|s| s.range().clone()) + .map(|s| s.range()) .collect::>(); editor .insert_snippet(&insertion_ranges, snippet, window, cx) @@ -9467,7 +9713,7 @@ async fn test_snippet_indentation(cx: &mut TestAppContext) { .selections .all(cx) .iter() - .map(|s| s.range().clone()) + .map(|s| s.range()) .collect::>(); editor .insert_snippet(&insertion_ranges, snippet, window, cx) @@ -10536,7 +10782,7 @@ async fn test_multiple_formatters(cx: &mut TestAppContext) { kind: Some("code-action-2".into()), edit: Some(lsp::WorkspaceEdit::new( [( - uri.clone(), + uri, vec![lsp::TextEdit::new( lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)), "applied-code-action-2-edit\n".to_string(), @@ -12064,7 +12310,7 @@ async fn test_completion_with_mode_specified_by_action(cx: &mut TestAppContext) let counter = Arc::new(AtomicUsize::new(0)); handle_completion_request_with_insert_and_replace( &mut cx, - &buffer_marked_text, + buffer_marked_text, vec![(completion_text, completion_text)], counter.clone(), ) @@ -12078,7 +12324,7 @@ async fn test_completion_with_mode_specified_by_action(cx: &mut TestAppContext) .confirm_completion_replace(&ConfirmCompletionReplace, window, cx) .unwrap() }); - cx.assert_editor_state(&expected_with_replace_mode); + cx.assert_editor_state(expected_with_replace_mode); handle_resolve_completion_request(&mut cx, None).await; apply_additional_edits.await.unwrap(); @@ -12098,7 +12344,7 @@ async fn test_completion_with_mode_specified_by_action(cx: &mut TestAppContext) }); handle_completion_request_with_insert_and_replace( &mut cx, - &buffer_marked_text, + buffer_marked_text, vec![(completion_text, completion_text)], counter.clone(), ) @@ -12112,7 +12358,7 @@ async fn test_completion_with_mode_specified_by_action(cx: &mut TestAppContext) .confirm_completion_insert(&ConfirmCompletionInsert, window, cx) .unwrap() }); - cx.assert_editor_state(&expected_with_insert_mode); + cx.assert_editor_state(expected_with_insert_mode); handle_resolve_completion_request(&mut cx, None).await; apply_additional_edits.await.unwrap(); } @@ -12886,7 +13132,7 @@ async fn test_word_completion(cx: &mut TestAppContext) { if let 
Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref() { assert_eq!( - completion_menu_entries(&menu), + completion_menu_entries(menu), &["first", "last"], "When LSP server is fast to reply, no fallback word completions are used" ); @@ -12909,7 +13155,7 @@ async fn test_word_completion(cx: &mut TestAppContext) { cx.update_editor(|editor, _, _| { if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref() { - assert_eq!(completion_menu_entries(&menu), &["one", "three", "two"], + assert_eq!(completion_menu_entries(menu), &["one", "three", "two"], "When LSP server is slow, document words can be shown instead, if configured accordingly"); } else { panic!("expected completion menu to be open"); @@ -12970,7 +13216,7 @@ async fn test_word_completions_do_not_duplicate_lsp_ones(cx: &mut TestAppContext if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref() { assert_eq!( - completion_menu_entries(&menu), + completion_menu_entries(menu), &["first", "last", "second"], "Word completions that has the same edit as the any of the LSP ones, should not be proposed" ); @@ -13026,7 +13272,7 @@ async fn test_word_completions_continue_on_typing(cx: &mut TestAppContext) { if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref() { assert_eq!( - completion_menu_entries(&menu), + completion_menu_entries(menu), &["first", "last", "second"], "`ShowWordCompletions` action should show word completions" ); @@ -13043,7 +13289,7 @@ async fn test_word_completions_continue_on_typing(cx: &mut TestAppContext) { if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref() { assert_eq!( - completion_menu_entries(&menu), + completion_menu_entries(menu), &["last"], "After showing word completions, further editing should filter them and not query the LSP" ); @@ -13082,7 +13328,7 @@ async fn test_word_completions_usually_skip_digits(cx: &mut TestAppContext) { if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref() { assert_eq!( - completion_menu_entries(&menu), + completion_menu_entries(menu), &["let"], "With no digits in the completion query, no digits should be in the word completions" ); @@ -13107,7 +13353,7 @@ async fn test_word_completions_usually_skip_digits(cx: &mut TestAppContext) { cx.update_editor(|editor, _, _| { if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref() { - assert_eq!(completion_menu_entries(&menu), &["33", "35f32"], "The digit is in the completion query, \ + assert_eq!(completion_menu_entries(menu), &["33", "35f32"], "The digit is in the completion query, \ return matching words with digits (`33`, `35f32`) but exclude query duplicates (`3`)"); } else { panic!("expected completion menu to be open"); @@ -13344,7 +13590,7 @@ async fn test_completion_page_up_down_keys(cx: &mut TestAppContext) { cx.update_editor(|editor, _, _| { if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref() { - assert_eq!(completion_menu_entries(&menu), &["first", "last"]); + assert_eq!(completion_menu_entries(menu), &["first", "last"]); } else { panic!("expected completion menu to be open"); } @@ -14120,7 +14366,7 @@ async fn test_toggle_block_comment(cx: &mut TestAppContext) { )); cx.language_registry().add(html_language.clone()); - cx.language_registry().add(javascript_language.clone()); + cx.language_registry().add(javascript_language); cx.update_buffer(|buffer, 
cx| { buffer.set_language(Some(html_language), cx); }); @@ -14297,7 +14543,7 @@ fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) { ); let excerpt_ranges = markers.into_iter().map(|marker| { let context = excerpt_ranges.remove(&marker).unwrap()[0].clone(); - ExcerptRange::new(context.clone()) + ExcerptRange::new(context) }); let buffer = cx.new(|cx| Buffer::local(initial_text, cx)); let multibuffer = cx.new(|cx| { @@ -14582,7 +14828,7 @@ fn test_highlighted_ranges(cx: &mut TestAppContext) { let editor = cx.add_window(|window, cx| { let buffer = MultiBuffer::build_simple(&sample_text(16, 8, 'a'), cx); - build_editor(buffer.clone(), window, cx) + build_editor(buffer, window, cx) }); _ = editor.update(cx, |editor, window, cx| { @@ -15037,7 +15283,7 @@ async fn go_to_prev_overlapping_diagnostic(executor: BackgroundExecutor, cx: &mu let mut cx = EditorTestContext::new(cx).await; let lsp_store = - cx.update_editor(|editor, _, cx| editor.project.as_ref().unwrap().read(cx).lsp_store()); + cx.update_editor(|editor, _, cx| editor.project().unwrap().read(cx).lsp_store()); cx.set_state(indoc! {" ˇfn func(abc def: i32) -> u32 { @@ -15504,8 +15750,7 @@ async fn test_on_type_formatting_is_applied_after_autoindent(cx: &mut TestAppCon cx.simulate_keystroke("\n"); cx.run_until_parked(); - let buffer_cloned = - cx.multibuffer(|multi_buffer, _| multi_buffer.as_singleton().unwrap().clone()); + let buffer_cloned = cx.multibuffer(|multi_buffer, _| multi_buffer.as_singleton().unwrap()); let mut request = cx.set_request_handler::(move |_, _, mut cx| { let buffer_cloned = buffer_cloned.clone(); @@ -16447,7 +16692,7 @@ async fn test_completions_in_languages_with_extra_word_characters(cx: &mut TestA if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref() { assert_eq!( - completion_menu_entries(&menu), + completion_menu_entries(menu), &["bg-blue", "bg-red", "bg-yellow"] ); } else { @@ -16460,7 +16705,7 @@ async fn test_completions_in_languages_with_extra_word_characters(cx: &mut TestA cx.update_editor(|editor, _, _| { if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref() { - assert_eq!(completion_menu_entries(&menu), &["bg-blue", "bg-yellow"]); + assert_eq!(completion_menu_entries(menu), &["bg-blue", "bg-yellow"]); } else { panic!("expected completion menu to be open"); } @@ -16474,7 +16719,7 @@ async fn test_completions_in_languages_with_extra_word_characters(cx: &mut TestA cx.update_editor(|editor, _, _| { if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref() { - assert_eq!(completion_menu_entries(&menu), &["bg-yellow"]); + assert_eq!(completion_menu_entries(menu), &["bg-yellow"]); } else { panic!("expected completion menu to be open"); } @@ -17043,7 +17288,7 @@ async fn test_multibuffer_reverts(cx: &mut TestAppContext) { (buffer_2.clone(), base_text_2), (buffer_3.clone(), base_text_3), ] { - let diff = cx.new(|cx| BufferDiff::new_with_base_text(&diff_base, &buffer, cx)); + let diff = cx.new(|cx| BufferDiff::new_with_base_text(diff_base, &buffer, cx)); editor .buffer .update(cx, |buffer, cx| buffer.add_diff(diff, cx)); @@ -17664,7 +17909,7 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut TestAppContext) { (buffer_2.clone(), file_2_old), (buffer_3.clone(), file_3_old), ] { - let diff = cx.new(|cx| BufferDiff::new_with_base_text(&diff_base, &buffer, cx)); + let diff = cx.new(|cx| BufferDiff::new_with_base_text(diff_base, &buffer, cx)); editor .buffer .update(cx, |buffer, cx| 
buffer.add_diff(diff, cx)); @@ -19209,7 +19454,7 @@ async fn test_adjacent_diff_hunks(executor: BackgroundExecutor, cx: &mut TestApp let buffer_id = hunks[0].buffer_id; hunks .into_iter() - .map(|hunk| Anchor::range_in_buffer(excerpt_id, buffer_id, hunk.buffer_range.clone())) + .map(|hunk| Anchor::range_in_buffer(excerpt_id, buffer_id, hunk.buffer_range)) .collect::>() }); assert_eq!(hunk_ranges.len(), 2); @@ -19300,7 +19545,7 @@ async fn test_adjacent_diff_hunks(executor: BackgroundExecutor, cx: &mut TestApp let buffer_id = hunks[0].buffer_id; hunks .into_iter() - .map(|hunk| Anchor::range_in_buffer(excerpt_id, buffer_id, hunk.buffer_range.clone())) + .map(|hunk| Anchor::range_in_buffer(excerpt_id, buffer_id, hunk.buffer_range)) .collect::>() }); assert_eq!(hunk_ranges.len(), 2); @@ -19366,7 +19611,7 @@ async fn test_toggle_deletion_hunk_at_start_of_file( let buffer_id = hunks[0].buffer_id; hunks .into_iter() - .map(|hunk| Anchor::range_in_buffer(excerpt_id, buffer_id, hunk.buffer_range.clone())) + .map(|hunk| Anchor::range_in_buffer(excerpt_id, buffer_id, hunk.buffer_range)) .collect::>() }); assert_eq!(hunk_ranges.len(), 1); @@ -19389,7 +19634,7 @@ async fn test_toggle_deletion_hunk_at_start_of_file( }); executor.run_until_parked(); - cx.assert_state_with_diff(hunk_expanded.clone()); + cx.assert_state_with_diff(hunk_expanded); } #[gpui::test] @@ -19589,13 +19834,8 @@ fn test_crease_insertion_and_rendering(cx: &mut TestAppContext) { editor.insert_creases(Some(crease), cx); let snapshot = editor.snapshot(window, cx); - let _div = snapshot.render_crease_toggle( - MultiBufferRow(1), - false, - cx.entity().clone(), - window, - cx, - ); + let _div = + snapshot.render_crease_toggle(MultiBufferRow(1), false, cx.entity(), window, cx); snapshot }) .unwrap(); @@ -20774,7 +21014,7 @@ async fn assert_highlighted_edits( cx.update(|_window, cx| { let highlighted_edits = edit_prediction_edit_text( - &snapshot.as_singleton().unwrap().2, + snapshot.as_singleton().unwrap().2, &edits, &edit_preview, include_deletions, @@ -20790,13 +21030,13 @@ fn assert_breakpoint( path: &Arc, expected: Vec<(u32, Breakpoint)>, ) { - if expected.len() == 0usize { + if expected.is_empty() { assert!(!breakpoints.contains_key(path), "{}", path.display()); } else { let mut breakpoint = breakpoints .get(path) .unwrap() - .into_iter() + .iter() .map(|breakpoint| { ( breakpoint.row, @@ -20825,13 +21065,7 @@ fn add_log_breakpoint_at_cursor( let (anchor, bp) = editor .breakpoints_at_cursors(window, cx) .first() - .and_then(|(anchor, bp)| { - if let Some(bp) = bp { - Some((*anchor, bp.clone())) - } else { - None - } - }) + .and_then(|(anchor, bp)| bp.as_ref().map(|bp| (*anchor, bp.clone()))) .unwrap_or_else(|| { let cursor_position: Point = editor.selections.newest(cx).head(); @@ -20841,7 +21075,7 @@ fn add_log_breakpoint_at_cursor( .buffer_snapshot .anchor_before(Point::new(cursor_position.row, 0)); - (breakpoint_position, Breakpoint::new_log(&log_message)) + (breakpoint_position, Breakpoint::new_log(log_message)) }); editor.edit_breakpoint_at_anchor( @@ -20909,7 +21143,7 @@ async fn test_breakpoint_toggling(cx: &mut TestAppContext) { let abs_path = project.read_with(cx, |project, cx| { project .absolute_path(&project_path, cx) - .map(|path_buf| Arc::from(path_buf.to_owned())) + .map(Arc::from) .unwrap() }); @@ -20927,7 +21161,6 @@ async fn test_breakpoint_toggling(cx: &mut TestAppContext) { .unwrap() .read(cx) .all_source_breakpoints(cx) - .clone() }); assert_eq!(1, breakpoints.len()); @@ -20952,7 +21185,6 @@ async fn 
test_breakpoint_toggling(cx: &mut TestAppContext) { .unwrap() .read(cx) .all_source_breakpoints(cx) - .clone() }); assert_eq!(1, breakpoints.len()); @@ -20974,7 +21206,6 @@ async fn test_breakpoint_toggling(cx: &mut TestAppContext) { .unwrap() .read(cx) .all_source_breakpoints(cx) - .clone() }); assert_eq!(0, breakpoints.len()); @@ -21026,7 +21257,7 @@ async fn test_log_breakpoint_editing(cx: &mut TestAppContext) { let abs_path = project.read_with(cx, |project, cx| { project .absolute_path(&project_path, cx) - .map(|path_buf| Arc::from(path_buf.to_owned())) + .map(Arc::from) .unwrap() }); @@ -21041,7 +21272,6 @@ async fn test_log_breakpoint_editing(cx: &mut TestAppContext) { .unwrap() .read(cx) .all_source_breakpoints(cx) - .clone() }); assert_breakpoint( @@ -21062,7 +21292,6 @@ async fn test_log_breakpoint_editing(cx: &mut TestAppContext) { .unwrap() .read(cx) .all_source_breakpoints(cx) - .clone() }); assert_breakpoint(&breakpoints, &abs_path, vec![]); @@ -21082,7 +21311,6 @@ async fn test_log_breakpoint_editing(cx: &mut TestAppContext) { .unwrap() .read(cx) .all_source_breakpoints(cx) - .clone() }); assert_breakpoint( @@ -21105,7 +21333,6 @@ async fn test_log_breakpoint_editing(cx: &mut TestAppContext) { .unwrap() .read(cx) .all_source_breakpoints(cx) - .clone() }); assert_breakpoint( @@ -21128,7 +21355,6 @@ async fn test_log_breakpoint_editing(cx: &mut TestAppContext) { .unwrap() .read(cx) .all_source_breakpoints(cx) - .clone() }); assert_breakpoint( @@ -21201,7 +21427,7 @@ async fn test_breakpoint_enabling_and_disabling(cx: &mut TestAppContext) { let abs_path = project.read_with(cx, |project, cx| { project .absolute_path(&project_path, cx) - .map(|path_buf| Arc::from(path_buf.to_owned())) + .map(Arc::from) .unwrap() }); @@ -21221,7 +21447,6 @@ async fn test_breakpoint_enabling_and_disabling(cx: &mut TestAppContext) { .unwrap() .read(cx) .all_source_breakpoints(cx) - .clone() }); assert_eq!(1, breakpoints.len()); @@ -21253,7 +21478,6 @@ async fn test_breakpoint_enabling_and_disabling(cx: &mut TestAppContext) { .unwrap() .read(cx) .all_source_breakpoints(cx) - .clone() }); let disable_breakpoint = { @@ -21289,7 +21513,6 @@ async fn test_breakpoint_enabling_and_disabling(cx: &mut TestAppContext) { .unwrap() .read(cx) .all_source_breakpoints(cx) - .clone() }); assert_eq!(1, breakpoints.len()); @@ -22268,10 +22491,7 @@ async fn test_html_linked_edits_on_completion(cx: &mut TestAppContext) { let closing_range = buffer.anchor_before(Point::new(0, 6))..buffer.anchor_after(Point::new(0, 8)); let mut linked_ranges = HashMap::default(); - linked_ranges.insert( - buffer_id, - vec![(opening_range.clone(), vec![closing_range.clone()])], - ); + linked_ranges.insert(buffer_id, vec![(opening_range, vec![closing_range])]); editor.linked_edit_ranges = LinkedEditingRanges(linked_ranges); }); let mut completion_handle = @@ -22456,7 +22676,7 @@ async fn test_invisible_worktree_servers(cx: &mut TestAppContext) { ); cx.update(|_, cx| { - workspace::reload(&workspace::Reload::default(), cx); + workspace::reload(cx); }); assert_language_servers_count( 1, @@ -23381,7 +23601,7 @@ pub fn handle_completion_request( complete_from_position ); Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList { - is_incomplete: is_incomplete, + is_incomplete, item_defaults: None, items: completions .iter() diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index a7fd0abf8824e3fde311ccb7e4d5724dfacadf85..797b0d663475cc074a688af8af38f044d0523809 100644 --- a/crates/editor/src/element.rs +++ 
b/crates/editor/src/element.rs @@ -40,14 +40,15 @@ use git::{ }; use gpui::{ Action, Along, AnyElement, App, AppContext, AvailableSpace, Axis as ScrollbarAxis, BorderStyle, - Bounds, ClickEvent, ContentMask, Context, Corner, Corners, CursorStyle, DispatchPhase, Edges, - Element, ElementInputHandler, Entity, Focusable as _, FontId, GlobalElementId, Hitbox, - HitboxBehavior, Hsla, InteractiveElement, IntoElement, IsZero, Keystroke, Length, - ModifiersChangedEvent, MouseButton, MouseClickEvent, MouseDownEvent, MouseMoveEvent, - MouseUpEvent, PaintQuad, ParentElement, Pixels, ScrollDelta, ScrollHandle, ScrollWheelEvent, - ShapedLine, SharedString, Size, StatefulInteractiveElement, Style, Styled, TextRun, - TextStyleRefinement, WeakEntity, Window, anchored, deferred, div, fill, linear_color_stop, - linear_gradient, outline, point, px, quad, relative, size, solid_background, transparent_black, + Bounds, ClickEvent, ClipboardItem, ContentMask, Context, Corner, Corners, CursorStyle, + DispatchPhase, Edges, Element, ElementInputHandler, Entity, Focusable as _, FontId, + GlobalElementId, Hitbox, HitboxBehavior, Hsla, InteractiveElement, IntoElement, IsZero, + Keystroke, Length, ModifiersChangedEvent, MouseButton, MouseClickEvent, MouseDownEvent, + MouseMoveEvent, MouseUpEvent, PaintQuad, ParentElement, Pixels, ScrollDelta, ScrollHandle, + ScrollWheelEvent, ShapedLine, SharedString, Size, StatefulInteractiveElement, Style, Styled, + TextRun, TextStyleRefinement, WeakEntity, Window, anchored, deferred, div, fill, + linear_color_stop, linear_gradient, outline, point, px, quad, relative, size, solid_background, + transparent_black, }; use itertools::Itertools; use language::language_settings::{ @@ -60,7 +61,7 @@ use multi_buffer::{ }; use project::{ - ProjectPath, + Entry, ProjectPath, debugger::breakpoint_store::{Breakpoint, BreakpointSessionState}, project_settings::{GitGutterSetting, GitHunkStyleSetting, ProjectSettings}, }; @@ -80,11 +81,17 @@ use std::{ use sum_tree::Bias; use text::{BufferId, SelectionGoal}; use theme::{ActiveTheme, Appearance, BufferLineHeight, PlayerColor}; -use ui::{ButtonLike, KeyBinding, POPOVER_Y_PADDING, Tooltip, h_flex, prelude::*}; +use ui::{ + ButtonLike, ContextMenu, Indicator, KeyBinding, POPOVER_Y_PADDING, Tooltip, h_flex, prelude::*, + right_click_menu, +}; use unicode_segmentation::UnicodeSegmentation; use util::post_inc; use util::{RangeExt, ResultExt, debug_panic}; -use workspace::{CollaboratorId, Workspace, item::Item, notifications::NotifyTaskExt}; +use workspace::{ + CollaboratorId, OpenInTerminal, OpenTerminal, RevealInProjectPanel, Workspace, item::Item, + notifications::NotifyTaskExt, +}; /// Determines what kinds of highlights should be applied to a lines background. 
#[derive(Clone, Copy, Default)] @@ -717,7 +724,7 @@ impl EditorElement { ColumnarMode::FromMouse => true, ColumnarMode::FromSelection => false, }, - mode: mode, + mode, goal_column: point_for_position.exact_unclipped.column(), }, window, @@ -910,6 +917,11 @@ impl EditorElement { } else if cfg!(any(target_os = "linux", target_os = "freebsd")) && event.button == MouseButton::Middle { + #[allow( + clippy::collapsible_if, + clippy::needless_return, + reason = "The cfg-block below makes this a false positive" + )] if !text_hitbox.is_hovered(window) || editor.read_only(cx) { return; } @@ -1115,26 +1127,24 @@ impl EditorElement { let hovered_diff_hunk_row = if let Some(control_row) = hovered_diff_control { Some(control_row) - } else { - if text_hovered { - let current_row = valid_point.row(); - position_map.display_hunks.iter().find_map(|(hunk, _)| { - if let DisplayDiffHunk::Unfolded { - display_row_range, .. - } = hunk - { - if display_row_range.contains(¤t_row) { - Some(display_row_range.start) - } else { - None - } + } else if text_hovered { + let current_row = valid_point.row(); + position_map.display_hunks.iter().find_map(|(hunk, _)| { + if let DisplayDiffHunk::Unfolded { + display_row_range, .. + } = hunk + { + if display_row_range.contains(¤t_row) { + Some(display_row_range.start) } else { None } - }) - } else { - None - } + } else { + None + } + }) + } else { + None }; if hovered_diff_hunk_row != editor.hovered_diff_hunk_row { @@ -1148,14 +1158,14 @@ impl EditorElement { .inline_blame_popover .as_ref() .and_then(|state| state.popover_bounds) - .map_or(false, |bounds| bounds.contains(&event.position)); + .is_some_and(|bounds| bounds.contains(&event.position)); let keyboard_grace = editor .inline_blame_popover .as_ref() - .map_or(false, |state| state.keyboard_grace); + .is_some_and(|state| state.keyboard_grace); if mouse_over_inline_blame || mouse_over_popover { - editor.show_blame_popover(&blame_entry, event.position, false, cx); + editor.show_blame_popover(blame_entry, event.position, false, cx); } else if !keyboard_grace { editor.hide_blame_popover(cx); } @@ -1179,10 +1189,10 @@ impl EditorElement { let is_visible = editor .gutter_breakpoint_indicator .0 - .map_or(false, |indicator| indicator.is_active); + .is_some_and(|indicator| indicator.is_active); let has_existing_breakpoint = - editor.breakpoint_store.as_ref().map_or(false, |store| { + editor.breakpoint_store.as_ref().is_some_and(|store| { let Some(project) = &editor.project else { return false; }; @@ -1380,29 +1390,27 @@ impl EditorElement { ref drop_cursor, ref hide_drop_cursor, } = editor.selection_drag_state + && !hide_drop_cursor + && (drop_cursor + .start + .cmp(&selection.start, &snapshot.buffer_snapshot) + .eq(&Ordering::Less) + || drop_cursor + .end + .cmp(&selection.end, &snapshot.buffer_snapshot) + .eq(&Ordering::Greater)) { - if !hide_drop_cursor - && (drop_cursor - .start - .cmp(&selection.start, &snapshot.buffer_snapshot) - .eq(&Ordering::Less) - || drop_cursor - .end - .cmp(&selection.end, &snapshot.buffer_snapshot) - .eq(&Ordering::Greater)) - { - let drag_cursor_layout = SelectionLayout::new( - drop_cursor.clone(), - false, - CursorShape::Bar, - &snapshot.display_snapshot, - false, - false, - None, - ); - let absent_color = cx.theme().players().absent(); - selections.push((absent_color, vec![drag_cursor_layout])); - } + let drag_cursor_layout = SelectionLayout::new( + drop_cursor.clone(), + false, + CursorShape::Bar, + &snapshot.display_snapshot, + false, + false, + None, + ); + let absent_color = 
cx.theme().players().absent(); + selections.push((absent_color, vec![drag_cursor_layout])); } } @@ -1413,19 +1421,15 @@ impl EditorElement { CollaboratorId::PeerId(peer_id) => { if let Some(collaborator) = collaboration_hub.collaborators(cx).get(&peer_id) - { - if let Some(participant_index) = collaboration_hub + && let Some(participant_index) = collaboration_hub .user_participant_indices(cx) .get(&collaborator.user_id) - { - if let Some((local_selection_style, _)) = selections.first_mut() - { - *local_selection_style = cx - .theme() - .players() - .color_for_participant(participant_index.0); - } - } + && let Some((local_selection_style, _)) = selections.first_mut() + { + *local_selection_style = cx + .theme() + .players() + .color_for_participant(participant_index.0); } } CollaboratorId::Agent => { @@ -2168,11 +2172,13 @@ impl EditorElement { }; let padding = ProjectSettings::get_global(cx).diagnostics.inline.padding as f32 * em_width; - let min_x = ProjectSettings::get_global(cx) - .diagnostics - .inline - .min_column as f32 - * em_width; + let min_x = self.column_pixels( + ProjectSettings::get_global(cx) + .diagnostics + .inline + .min_column as usize, + window, + ); let mut elements = HashMap::default(); for (row, mut diagnostics) in diagnostics_by_rows { @@ -2213,12 +2219,11 @@ impl EditorElement { cmp::max(padded_line, min_start) }; - let behind_edit_prediction_popover = edit_prediction_popover_origin.as_ref().map_or( - false, - |edit_prediction_popover_origin| { + let behind_edit_prediction_popover = edit_prediction_popover_origin + .as_ref() + .is_some_and(|edit_prediction_popover_origin| { (pos_y..pos_y + line_height).contains(&edit_prediction_popover_origin.y) - }, - ); + }); let opacity = if behind_edit_prediction_popover { 0.5 } else { @@ -2284,9 +2289,7 @@ impl EditorElement { None } }) - .map_or(false, |source| { - matches!(source, CodeActionSource::Indicator(..)) - }); + .is_some_and(|source| matches!(source, CodeActionSource::Indicator(..))); Some(editor.render_inline_code_actions(icon_size, display_point.row(), active, cx)) })?; @@ -2434,14 +2437,13 @@ impl EditorElement { .unwrap_or_default() .padding as f32; - if let Some(edit_prediction) = editor.active_edit_prediction.as_ref() { - match &edit_prediction.completion { - EditPrediction::Edit { - display_mode: EditDisplayMode::TabAccept, - .. - } => padding += INLINE_ACCEPT_SUGGESTION_EM_WIDTHS, - _ => {} - } + if let Some(edit_prediction) = editor.active_edit_prediction.as_ref() + && let EditPrediction::Edit { + display_mode: EditDisplayMode::TabAccept, + .. + } = &edit_prediction.completion + { + padding += INLINE_ACCEPT_SUGGESTION_EM_WIDTHS } padding * em_width @@ -2747,7 +2749,10 @@ impl EditorElement { let mut block_offset = 0; let mut found_excerpt_header = false; for (_, block) in snapshot.blocks_in_range(prev_line..row_range.start) { - if matches!(block, Block::ExcerptBoundary { .. }) { + if matches!( + block, + Block::ExcerptBoundary { .. } | Block::BufferHeader { .. } + ) { found_excerpt_header = true; break; } @@ -2764,7 +2769,10 @@ impl EditorElement { let mut block_height = 0; let mut found_excerpt_header = false; for (_, block) in snapshot.blocks_in_range(row_range.end..cons_line) { - if matches!(block, Block::ExcerptBoundary { .. }) { + if matches!( + block, + Block::ExcerptBoundary { .. } | Block::BufferHeader { .. 
} + ) { found_excerpt_header = true; } block_height += block.height(); @@ -2811,7 +2819,7 @@ impl EditorElement { } let row = - MultiBufferRow(DisplayPoint::new(display_row, 0).to_point(&snapshot).row); + MultiBufferRow(DisplayPoint::new(display_row, 0).to_point(snapshot).row); if snapshot.is_line_folded(row) { return None; } @@ -2902,7 +2910,7 @@ impl EditorElement { if multibuffer_row .0 .checked_sub(1) - .map_or(false, |previous_row| { + .is_some_and(|previous_row| { snapshot.is_line_folded(MultiBufferRow(previous_row)) }) { @@ -2975,8 +2983,8 @@ impl EditorElement { .ilog10() + 1; - let elements = buffer_rows - .into_iter() + buffer_rows + .iter() .enumerate() .map(|(ix, row_info)| { let ExpandInfo { @@ -3011,7 +3019,7 @@ impl EditorElement { .icon_color(Color::Custom(cx.theme().colors().editor_line_number)) .selected_icon_color(Color::Custom(cx.theme().colors().editor_foreground)) .icon_size(IconSize::Custom(rems(editor_font_size / window.rem_size()))) - .width(width.into()) + .width(width) .on_click(move |_, window, cx| { editor.update(cx, |editor, cx| { editor.expand_excerpt(excerpt_id, direction, window, cx); @@ -3031,9 +3039,7 @@ impl EditorElement { Some((toggle, origin)) }) - .collect(); - - elements + .collect() } fn calculate_relative_line_numbers( @@ -3133,7 +3139,7 @@ impl EditorElement { let relative_rows = self.calculate_relative_line_numbers(snapshot, &rows, relative_to); let mut line_number = String::new(); let line_numbers = buffer_rows - .into_iter() + .iter() .enumerate() .flat_map(|(ix, row_info)| { let display_row = DisplayRow(rows.start.0 + ix as u32); @@ -3210,7 +3216,7 @@ impl EditorElement { && self.editor.read(cx).is_singleton(cx); if include_fold_statuses { row_infos - .into_iter() + .iter() .enumerate() .map(|(ix, info)| { if info.expand_info.is_some() { @@ -3305,7 +3311,7 @@ impl EditorElement { let chunks = snapshot.highlighted_chunks(rows.clone(), true, style); LineWithInvisibles::from_chunks( chunks, - &style, + style, MAX_LINE_LEN, rows.len(), &snapshot.mode, @@ -3386,7 +3392,7 @@ impl EditorElement { let line_ix = align_to.row().0.checked_sub(rows.start.0); x_position = if let Some(layout) = line_ix.and_then(|ix| line_layouts.get(ix as usize)) { - x_and_width(&layout) + x_and_width(layout) } else { x_and_width(&layout_line( align_to.row(), @@ -3452,42 +3458,41 @@ impl EditorElement { .into_any_element() } - Block::ExcerptBoundary { - excerpt, - height, - starts_new_buffer, - .. - } => { + Block::ExcerptBoundary { .. } => { let color = cx.theme().colors().clone(); let mut result = v_flex().id(block_id).w_full(); + result = result.child( + h_flex().relative().child( + div() + .top(line_height / 2.) 
+ .absolute() + .w_full() + .h_px() + .bg(color.border_variant), + ), + ); + + result.into_any() + } + + Block::BufferHeader { excerpt, height } => { + let mut result = v_flex().id(block_id).w_full(); + let jump_data = header_jump_data(snapshot, block_row_start, *height, excerpt); - if *starts_new_buffer { - if sticky_header_excerpt_id != Some(excerpt.id) { - let selected = selected_buffer_ids.contains(&excerpt.buffer_id); + if sticky_header_excerpt_id != Some(excerpt.id) { + let selected = selected_buffer_ids.contains(&excerpt.buffer_id); - result = result.child(div().pr(editor_margins.right).child( - self.render_buffer_header( - excerpt, false, selected, false, jump_data, window, cx, - ), - )); - } else { - result = - result.child(div().h(FILE_HEADER_HEIGHT as f32 * window.line_height())); - } - } else { - result = result.child( - h_flex().relative().child( - div() - .top(line_height / 2.) - .absolute() - .w_full() - .h_px() - .bg(color.border_variant), + result = result.child(div().pr(editor_margins.right).child( + self.render_buffer_header( + excerpt, false, selected, false, jump_data, window, cx, ), - ); - }; + )); + } else { + result = + result.child(div().h(FILE_HEADER_HEIGHT as f32 * window.line_height())); + } result.into_any() } @@ -3511,33 +3516,33 @@ impl EditorElement { let mut x_offset = px(0.); let mut is_block = true; - if let BlockId::Custom(custom_block_id) = block_id { - if block.has_height() { - if block.place_near() { - if let Some((x_target, line_width)) = x_position { - let margin = em_width * 2; - if line_width + final_size.width + margin - < editor_width + editor_margins.gutter.full_width() - && !row_block_types.contains_key(&(row - 1)) - && element_height_in_lines == 1 - { - x_offset = line_width + margin; - row = row - 1; - is_block = false; - element_height_in_lines = 0; - row_block_types.insert(row, is_block); - } else { - let max_offset = editor_width + editor_margins.gutter.full_width() - - final_size.width; - let min_offset = (x_target + em_width - final_size.width) - .max(editor_margins.gutter.full_width()); - x_offset = x_target.min(max_offset).max(min_offset); - } - } - }; - if element_height_in_lines != block.height() { - resized_blocks.insert(custom_block_id, element_height_in_lines); + if let BlockId::Custom(custom_block_id) = block_id + && block.has_height() + { + if block.place_near() + && let Some((x_target, line_width)) = x_position + { + let margin = em_width * 2; + if line_width + final_size.width + margin + < editor_width + editor_margins.gutter.full_width() + && !row_block_types.contains_key(&(row - 1)) + && element_height_in_lines == 1 + { + x_offset = line_width + margin; + row = row - 1; + is_block = false; + element_height_in_lines = 0; + row_block_types.insert(row, is_block); + } else { + let max_offset = + editor_width + editor_margins.gutter.full_width() - final_size.width; + let min_offset = (x_target + em_width - final_size.width) + .max(editor_margins.gutter.full_width()); + x_offset = x_target.min(max_offset).max(min_offset); } + }; + if element_height_in_lines != block.height() { + resized_blocks.insert(custom_block_id, element_height_in_lines); } } for i in 0..element_height_in_lines { @@ -3556,11 +3561,10 @@ impl EditorElement { jump_data: JumpData, window: &mut Window, cx: &mut App, - ) -> Div { + ) -> impl IntoElement { let editor = self.editor.read(cx); - let file_status = editor - .buffer - .read(cx) + let multi_buffer = editor.buffer.read(cx); + let file_status = multi_buffer .all_diff_hunks_expanded() .then(|| { editor @@ 
-3570,6 +3574,17 @@ impl EditorElement { .status_for_buffer_id(for_excerpt.buffer_id, cx) }) .flatten(); + let indicator = multi_buffer + .buffer(for_excerpt.buffer_id) + .and_then(|buffer| { + let buffer = buffer.read(cx); + let indicator_color = match (buffer.has_conflict(), buffer.is_dirty()) { + (true, _) => Some(Color::Warning), + (_, true) => Some(Color::Accent), + (false, false) => None, + }; + indicator_color.map(|indicator_color| Indicator::dot().color(indicator_color)) + }); let include_root = editor .project @@ -3577,126 +3592,126 @@ impl EditorElement { .map(|project| project.read(cx).visible_worktrees(cx).count() > 1) .unwrap_or_default(); let can_open_excerpts = Editor::can_open_excerpts_in_file(for_excerpt.buffer.file()); - let path = for_excerpt.buffer.resolve_file_path(cx, include_root); - let filename = path + let relative_path = for_excerpt.buffer.resolve_file_path(cx, include_root); + let filename = relative_path .as_ref() .and_then(|path| Some(path.file_name()?.to_string_lossy().to_string())); - let parent_path = path.as_ref().and_then(|path| { + let parent_path = relative_path.as_ref().and_then(|path| { Some(path.parent()?.to_string_lossy().to_string() + std::path::MAIN_SEPARATOR_STR) }); let focus_handle = editor.focus_handle(cx); let colors = cx.theme().colors(); - div() - .p_1() - .w_full() - .h(FILE_HEADER_HEIGHT as f32 * window.line_height()) - .child( - h_flex() - .size_full() - .gap_2() - .flex_basis(Length::Definite(DefiniteLength::Fraction(0.667))) - .pl_0p5() - .pr_5() - .rounded_sm() - .when(is_sticky, |el| el.shadow_md()) - .border_1() - .map(|div| { - let border_color = if is_selected - && is_folded - && focus_handle.contains_focused(window, cx) - { - colors.border_focused - } else { - colors.border - }; - div.border_color(border_color) - }) - .bg(colors.editor_subheader_background) - .hover(|style| style.bg(colors.element_hover)) - .map(|header| { - let editor = self.editor.clone(); - let buffer_id = for_excerpt.buffer_id; - let toggle_chevron_icon = - FileIcons::get_chevron_icon(!is_folded, cx).map(Icon::from_path); - header.child( - div() - .hover(|style| style.bg(colors.element_selected)) - .rounded_xs() - .child( - ButtonLike::new("toggle-buffer-fold") - .style(ui::ButtonStyle::Transparent) - .height(px(28.).into()) - .width(px(28.).into()) - .children(toggle_chevron_icon) - .tooltip({ - let focus_handle = focus_handle.clone(); - move |window, cx| { - Tooltip::with_meta_in( - "Toggle Excerpt Fold", - Some(&ToggleFold), - "Alt+click to toggle all", - &focus_handle, - window, - cx, - ) - } - }) - .on_click(move |event, window, cx| { - if event.modifiers().alt { - // Alt+click toggles all buffers - editor.update(cx, |editor, cx| { - editor.toggle_fold_all( - &ToggleFoldAll, + let header = + div() + .p_1() + .w_full() + .h(FILE_HEADER_HEIGHT as f32 * window.line_height()) + .child( + h_flex() + .size_full() + .gap_2() + .flex_basis(Length::Definite(DefiniteLength::Fraction(0.667))) + .pl_0p5() + .pr_5() + .rounded_sm() + .when(is_sticky, |el| el.shadow_md()) + .border_1() + .map(|div| { + let border_color = if is_selected + && is_folded + && focus_handle.contains_focused(window, cx) + { + colors.border_focused + } else { + colors.border + }; + div.border_color(border_color) + }) + .bg(colors.editor_subheader_background) + .hover(|style| style.bg(colors.element_hover)) + .map(|header| { + let editor = self.editor.clone(); + let buffer_id = for_excerpt.buffer_id; + let toggle_chevron_icon = + FileIcons::get_chevron_icon(!is_folded, 
cx).map(Icon::from_path); + header.child( + div() + .hover(|style| style.bg(colors.element_selected)) + .rounded_xs() + .child( + ButtonLike::new("toggle-buffer-fold") + .style(ui::ButtonStyle::Transparent) + .height(px(28.).into()) + .width(px(28.)) + .children(toggle_chevron_icon) + .tooltip({ + let focus_handle = focus_handle.clone(); + move |window, cx| { + Tooltip::with_meta_in( + "Toggle Excerpt Fold", + Some(&ToggleFold), + "Alt+click to toggle all", + &focus_handle, window, cx, - ); - }); - } else { - // Regular click toggles single buffer - if is_folded { + ) + } + }) + .on_click(move |event, window, cx| { + if event.modifiers().alt { + // Alt+click toggles all buffers editor.update(cx, |editor, cx| { - editor.unfold_buffer(buffer_id, cx); + editor.toggle_fold_all( + &ToggleFoldAll, + window, + cx, + ); }); } else { - editor.update(cx, |editor, cx| { - editor.fold_buffer(buffer_id, cx); - }); + // Regular click toggles single buffer + if is_folded { + editor.update(cx, |editor, cx| { + editor.unfold_buffer(buffer_id, cx); + }); + } else { + editor.update(cx, |editor, cx| { + editor.fold_buffer(buffer_id, cx); + }); + } } - } - }), - ), + }), + ), + ) + }) + .children( + editor + .addons + .values() + .filter_map(|addon| { + addon.render_buffer_header_controls(for_excerpt, window, cx) + }) + .take(1), ) - }) - .children( - editor - .addons - .values() - .filter_map(|addon| { - addon.render_buffer_header_controls(for_excerpt, window, cx) - }) - .take(1), - ) - .child( - h_flex() - .cursor_pointer() - .id("path header block") - .size_full() - .justify_between() - .overflow_hidden() - .child( - h_flex() - .gap_2() - .child( - Label::new( - filename - .map(SharedString::from) - .unwrap_or_else(|| "untitled".into()), - ) - .single_line() - .when_some( - file_status, - |el, status| { + .children(indicator) + .child( + h_flex() + .cursor_pointer() + .id("path header block") + .size_full() + .justify_between() + .overflow_hidden() + .child( + h_flex() + .gap_2() + .child( + Label::new( + filename + .map(SharedString::from) + .unwrap_or_else(|| "untitled".into()), + ) + .single_line() + .when_some(file_status, |el, status| { el.color(if status.is_conflicted() { Color::Conflict } else if status.is_modified() { @@ -3707,49 +3722,145 @@ impl EditorElement { Color::Created }) .when(status.is_deleted(), |el| el.strikethrough()) - }, - ), - ) - .when_some(parent_path, |then, path| { - then.child(div().child(path).text_color( - if file_status.is_some_and(FileStatus::is_deleted) { - colors.text_disabled - } else { - colors.text_muted - }, - )) + }), + ) + .when_some(parent_path, |then, path| { + then.child(div().child(path).text_color( + if file_status.is_some_and(FileStatus::is_deleted) { + colors.text_disabled + } else { + colors.text_muted + }, + )) + }), + ) + .when( + can_open_excerpts && is_selected && relative_path.is_some(), + |el| { + el.child( + h_flex() + .id("jump-to-file-button") + .gap_2p5() + .child(Label::new("Jump To File")) + .children( + KeyBinding::for_action_in( + &OpenExcerpts, + &focus_handle, + window, + cx, + ) + .map(|binding| binding.into_any_element()), + ), + ) + }, + ) + .on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation()) + .on_click(window.listener_for(&self.editor, { + move |editor, e: &ClickEvent, window, cx| { + editor.open_excerpts_common( + Some(jump_data.clone()), + e.modifiers().secondary(), + window, + cx, + ); + } + })), + ), + ); + + let file = for_excerpt.buffer.file().cloned(); + let editor = self.editor.clone(); + 
right_click_menu("buffer-header-context-menu") + .trigger(move |_, _, _| header) + .menu(move |window, cx| { + let menu_context = focus_handle.clone(); + let editor = editor.clone(); + let file = file.clone(); + ContextMenu::build(window, cx, move |mut menu, window, cx| { + if let Some(file) = file + && let Some(project) = editor.read(cx).project() + && let Some(worktree) = + project.read(cx).worktree_for_id(file.worktree_id(cx), cx) + { + let relative_path = file.path(); + let entry_for_path = worktree.read(cx).entry_for_path(relative_path); + let abs_path = entry_for_path.and_then(|e| e.canonical_path.as_deref()); + let has_relative_path = + worktree.read(cx).root_entry().is_some_and(Entry::is_dir); + + let parent_abs_path = + abs_path.and_then(|abs_path| Some(abs_path.parent()?.to_path_buf())); + let relative_path = has_relative_path + .then_some(relative_path) + .map(ToOwned::to_owned); + + let visible_in_project_panel = + relative_path.is_some() && worktree.read(cx).is_visible(); + let reveal_in_project_panel = entry_for_path + .filter(|_| visible_in_project_panel) + .map(|entry| entry.id); + menu = menu + .when_some(abs_path.map(ToOwned::to_owned), |menu, abs_path| { + menu.entry( + "Copy Path", + Some(Box::new(zed_actions::workspace::CopyPath)), + window.handler_for(&editor, move |_, _, cx| { + cx.write_to_clipboard(ClipboardItem::new_string( + abs_path.to_string_lossy().to_string(), + )); + }), + ) + }) + .when_some(relative_path, |menu, relative_path| { + menu.entry( + "Copy Relative Path", + Some(Box::new(zed_actions::workspace::CopyRelativePath)), + window.handler_for(&editor, move |_, _, cx| { + cx.write_to_clipboard(ClipboardItem::new_string( + relative_path.to_string_lossy().to_string(), + )); }), + ) + }) + .when( + reveal_in_project_panel.is_some() || parent_abs_path.is_some(), + |menu| menu.separator(), ) - .when(can_open_excerpts && is_selected && path.is_some(), |el| { - el.child( - h_flex() - .id("jump-to-file-button") - .gap_2p5() - .child(Label::new("Jump To File")) - .children( - KeyBinding::for_action_in( - &OpenExcerpts, - &focus_handle, - window, - cx, - ) - .map(|binding| binding.into_any_element()), - ), + .when_some(reveal_in_project_panel, |menu, entry_id| { + menu.entry( + "Reveal In Project Panel", + Some(Box::new(RevealInProjectPanel::default())), + window.handler_for(&editor, move |editor, _, cx| { + if let Some(project) = &mut editor.project { + project.update(cx, |_, cx| { + cx.emit(project::Event::RevealInProjectPanel( + entry_id, + )) + }); + } + }), ) }) - .on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation()) - .on_click(window.listener_for(&self.editor, { - move |editor, e: &ClickEvent, window, cx| { - editor.open_excerpts_common( - Some(jump_data.clone()), - e.modifiers().secondary(), - window, - cx, - ); - } - })), - ), - ) + .when_some(parent_abs_path, |menu, parent_abs_path| { + menu.entry( + "Open in Terminal", + Some(Box::new(OpenInTerminal)), + window.handler_for(&editor, move |_, window, cx| { + window.dispatch_action( + OpenTerminal { + working_directory: parent_abs_path.clone(), + } + .boxed_clone(), + cx, + ); + }), + ) + }); + } + + menu.context(menu_context) + }) + }) } fn render_blocks( @@ -3787,7 +3898,7 @@ impl EditorElement { for (row, block) in fixed_blocks { let block_id = block.id(); - if focused_block.as_ref().map_or(false, |b| b.id == block_id) { + if focused_block.as_ref().is_some_and(|b| b.id == block_id) { focused_block = None; } @@ -3844,7 +3955,7 @@ impl EditorElement { }; let block_id = block.id(); - if 
focused_block.as_ref().map_or(false, |b| b.id == block_id) { + if focused_block.as_ref().is_some_and(|b| b.id == block_id) { focused_block = None; } @@ -3885,60 +3996,58 @@ impl EditorElement { } } - if let Some(focused_block) = focused_block { - if let Some(focus_handle) = focused_block.focus_handle.upgrade() { - if focus_handle.is_focused(window) { - if let Some(block) = snapshot.block_for_id(focused_block.id) { - let style = block.style(); - let width = match style { - BlockStyle::Fixed => AvailableSpace::MinContent, - BlockStyle::Flex => AvailableSpace::Definite( - hitbox - .size - .width - .max(fixed_block_max_width) - .max(editor_margins.gutter.width + *scroll_width), - ), - BlockStyle::Sticky => AvailableSpace::Definite(hitbox.size.width), - }; + if let Some(focused_block) = focused_block + && let Some(focus_handle) = focused_block.focus_handle.upgrade() + && focus_handle.is_focused(window) + && let Some(block) = snapshot.block_for_id(focused_block.id) + { + let style = block.style(); + let width = match style { + BlockStyle::Fixed => AvailableSpace::MinContent, + BlockStyle::Flex => AvailableSpace::Definite( + hitbox + .size + .width + .max(fixed_block_max_width) + .max(editor_margins.gutter.width + *scroll_width), + ), + BlockStyle::Sticky => AvailableSpace::Definite(hitbox.size.width), + }; - if let Some((element, element_size, _, x_offset)) = self.render_block( - &block, - width, - focused_block.id, - rows.end, - snapshot, - text_x, - &rows, - line_layouts, - editor_margins, - line_height, - em_width, - text_hitbox, - editor_width, - scroll_width, - &mut resized_blocks, - &mut row_block_types, - selections, - selected_buffer_ids, - is_row_soft_wrapped, - sticky_header_excerpt_id, - window, - cx, - ) { - blocks.push(BlockLayout { - id: block.id(), - x_offset, - row: None, - element, - available_space: size(width, element_size.height.into()), - style, - overlaps_gutter: true, - is_buffer_header: block.is_buffer_header(), - }); - } - } - } + if let Some((element, element_size, _, x_offset)) = self.render_block( + &block, + width, + focused_block.id, + rows.end, + snapshot, + text_x, + &rows, + line_layouts, + editor_margins, + line_height, + em_width, + text_hitbox, + editor_width, + scroll_width, + &mut resized_blocks, + &mut row_block_types, + selections, + selected_buffer_ids, + is_row_soft_wrapped, + sticky_header_excerpt_id, + window, + cx, + ) { + blocks.push(BlockLayout { + id: block.id(), + x_offset, + row: None, + element, + available_space: size(width, element_size.height.into()), + style, + overlaps_gutter: true, + is_buffer_header: block.is_buffer_header(), + }); } } @@ -4101,19 +4210,19 @@ impl EditorElement { edit_prediction_popover_visible = true; } - if editor.context_menu_visible() { - if let Some(crate::ContextMenuOrigin::Cursor) = editor.context_menu_origin() { - let (min_height_in_lines, max_height_in_lines) = editor - .context_menu_options - .as_ref() - .map_or((3, 12), |options| { - (options.min_entries_visible, options.max_entries_visible) - }); + if editor.context_menu_visible() + && let Some(crate::ContextMenuOrigin::Cursor) = editor.context_menu_origin() + { + let (min_height_in_lines, max_height_in_lines) = editor + .context_menu_options + .as_ref() + .map_or((3, 12), |options| { + (options.min_entries_visible, options.max_entries_visible) + }); - min_menu_height += line_height * min_height_in_lines as f32 + POPOVER_Y_PADDING; - max_menu_height += line_height * max_height_in_lines as f32 + POPOVER_Y_PADDING; - context_menu_visible = true; - } + 
min_menu_height += line_height * min_height_in_lines as f32 + POPOVER_Y_PADDING; + max_menu_height += line_height * max_height_in_lines as f32 + POPOVER_Y_PADDING; + context_menu_visible = true; } context_menu_placement = editor .context_menu_options @@ -4625,7 +4734,7 @@ impl EditorElement { } }; - let source_included = source_display_point.map_or(true, |source_display_point| { + let source_included = source_display_point.is_none_or(|source_display_point| { visible_range .to_inclusive() .contains(&source_display_point.row()) @@ -4805,7 +4914,7 @@ impl EditorElement { let intersects_menu = |bounds: Bounds| -> bool { context_menu_layout .as_ref() - .map_or(false, |menu| bounds.intersects(&menu.bounds)) + .is_some_and(|menu| bounds.intersects(&menu.bounds)) }; let can_place_above = { @@ -4990,7 +5099,7 @@ impl EditorElement { if active_positions .iter() - .any(|p| p.map_or(false, |p| display_row_range.contains(&p.row()))) + .any(|p| p.is_some_and(|p| display_row_range.contains(&p.row()))) { let y = display_row_range.start.as_f32() * line_height + text_hitbox.bounds.top() @@ -5103,7 +5212,7 @@ impl EditorElement { let intersects_menu = |bounds: Bounds| -> bool { context_menu_layout .as_ref() - .map_or(false, |menu| bounds.intersects(&menu.bounds)) + .is_some_and(|menu| bounds.intersects(&menu.bounds)) }; let final_origin = if popover_bounds_above.is_contained_within(hitbox) @@ -5188,7 +5297,7 @@ impl EditorElement { let mut end_row = start_row.0; while active_rows .peek() - .map_or(false, |(active_row, has_selection)| { + .is_some_and(|(active_row, has_selection)| { active_row.0 == end_row + 1 && has_selection.selection == contains_non_empty_selection.selection }) @@ -5447,9 +5556,9 @@ impl EditorElement { // In singleton buffers, we select corresponding lines on the line number click, so use | -like cursor. // In multi buffers, we open file at the line number clicked, so use a pointing hand cursor. if is_singleton { - window.set_cursor_style(CursorStyle::IBeam, &hitbox); + window.set_cursor_style(CursorStyle::IBeam, hitbox); } else { - window.set_cursor_style(CursorStyle::PointingHand, &hitbox); + window.set_cursor_style(CursorStyle::PointingHand, hitbox); } } } @@ -5468,7 +5577,7 @@ impl EditorElement { &layout.position_map.snapshot, line_height, layout.gutter_hitbox.bounds, - &hunk, + hunk, ); Some(( hunk_bounds, @@ -5604,7 +5713,10 @@ impl EditorElement { let end_row_in_current_excerpt = snapshot .blocks_in_range(start_row..end_row) .find_map(|(start_row, block)| { - if matches!(block, Block::ExcerptBoundary { .. }) { + if matches!( + block, + Block::ExcerptBoundary { .. } | Block::BufferHeader { .. } + ) { Some(start_row) } else { None @@ -5659,16 +5771,15 @@ impl EditorElement { cx: &mut App, ) { for (_, hunk_hitbox) in &layout.display_hunks { - if let Some(hunk_hitbox) = hunk_hitbox { - if !self + if let Some(hunk_hitbox) = hunk_hitbox + && !self .editor .read(cx) .buffer() .read(cx) .all_diff_hunks_expanded() - { - window.set_cursor_style(CursorStyle::PointingHand, hunk_hitbox); - } + { + window.set_cursor_style(CursorStyle::PointingHand, hunk_hitbox); } } @@ -5990,10 +6101,10 @@ impl EditorElement { if axis == ScrollbarAxis::Vertical { let fast_markers = - self.collect_fast_scrollbar_markers(layout, &scrollbar_layout, cx); + self.collect_fast_scrollbar_markers(layout, scrollbar_layout, cx); // Refresh slow scrollbar markers in the background. Below, we // paint whatever markers have already been computed. 
- self.refresh_slow_scrollbar_markers(layout, &scrollbar_layout, window, cx); + self.refresh_slow_scrollbar_markers(layout, scrollbar_layout, window, cx); let markers = self.editor.read(cx).scrollbar_marker_state.markers.clone(); for marker in markers.iter().chain(&fast_markers) { @@ -6027,7 +6138,7 @@ impl EditorElement { if any_scrollbar_dragged { window.set_window_cursor_style(CursorStyle::Arrow); } else { - window.set_cursor_style(CursorStyle::Arrow, &hitbox); + window.set_cursor_style(CursorStyle::Arrow, hitbox); } } }) @@ -6577,25 +6688,23 @@ impl EditorElement { editor.set_scroll_position(position, window, cx); } cx.stop_propagation(); - } else { - if minimap_hitbox.is_hovered(window) { - editor.scroll_manager.set_is_hovering_minimap_thumb( - !event.dragging() - && layout - .thumb_layout - .thumb_bounds - .is_some_and(|bounds| bounds.contains(&event.position)), - cx, - ); + } else if minimap_hitbox.is_hovered(window) { + editor.scroll_manager.set_is_hovering_minimap_thumb( + !event.dragging() + && layout + .thumb_layout + .thumb_bounds + .is_some_and(|bounds| bounds.contains(&event.position)), + cx, + ); - // Stop hover events from propagating to the - // underlying editor if the minimap hitbox is hovered - if !event.dragging() { - cx.stop_propagation(); - } - } else { - editor.scroll_manager.hide_minimap_thumb(cx); + // Stop hover events from propagating to the + // underlying editor if the minimap hitbox is hovered + if !event.dragging() { + cx.stop_propagation(); } + } else { + editor.scroll_manager.hide_minimap_thumb(cx); } mouse_position = event.position; }); @@ -6974,9 +7083,7 @@ impl EditorElement { let unstaged_hollow = ProjectSettings::get_global(cx) .git .hunk_style - .map_or(false, |style| { - matches!(style, GitHunkStyleSetting::UnstagedHollow) - }); + .is_some_and(|style| matches!(style, GitHunkStyleSetting::UnstagedHollow)); unstaged == unstaged_hollow } @@ -7110,7 +7217,7 @@ fn render_blame_entry_popover( ) -> Option { let renderer = cx.global::().0.clone(); let blame = blame.read(cx); - let repository = blame.repository(cx)?.clone(); + let repository = blame.repository(cx)?; renderer.render_blame_entry_popover( blame_entry, scroll_handle, @@ -7815,7 +7922,7 @@ impl Element for EditorElement { min_lines, max_lines, } => { - let editor_handle = cx.entity().clone(); + let editor_handle = cx.entity(); let max_line_number_width = self.max_line_number_width(&editor.snapshot(window, cx), window); window.request_measured_layout( @@ -8006,7 +8113,7 @@ impl Element for EditorElement { // The max scroll position for the top of the window let max_scroll_top = if matches!( snapshot.mode, - EditorMode::SingleLine { .. } + EditorMode::SingleLine | EditorMode::AutoHeight { .. 
} | EditorMode::Full { sized_by_content: true, @@ -8073,7 +8180,7 @@ impl Element for EditorElement { let is_row_soft_wrapped = |row: usize| { row_infos .get(row) - .map_or(true, |info| info.buffer_row.is_none()) + .is_none_or(|info| info.buffer_row.is_none()) }; let start_anchor = if start_row == Default::default() { @@ -8910,7 +9017,7 @@ impl Element for EditorElement { .as_ref() .map(|layout| (layout.bounds, layout.entry.clone())), display_hunks: display_hunks.clone(), - diff_hunk_control_bounds: diff_hunk_control_bounds.clone(), + diff_hunk_control_bounds, }); self.editor.update(cx, |editor, _| { @@ -9608,14 +9715,12 @@ impl PointForPosition { false } else if start_row == end_row { candidate_col >= start_col && candidate_col < end_col + } else if candidate_row == start_row { + candidate_col >= start_col + } else if candidate_row == end_row { + candidate_col < end_col } else { - if candidate_row == start_row { - candidate_col >= start_col - } else if candidate_row == end_row { - candidate_col < end_col - } else { - true - } + true } } } @@ -9680,7 +9785,7 @@ pub fn layout_line( let chunks = snapshot.highlighted_chunks(row..row + DisplayRow(1), true, style); LineWithInvisibles::from_chunks( chunks, - &style, + style, MAX_LINE_LEN, 1, &snapshot.mode, @@ -9797,7 +9902,7 @@ impl CursorLayout { .px_0p5() .line_height(text_size + px(2.)) .text_color(cursor_name.color) - .child(cursor_name.string.clone()) + .child(cursor_name.string) .into_any_element(); name_element.prepaint_as_root(name_origin, AvailableSpace::min_size(), window, cx); @@ -10050,10 +10155,10 @@ fn compute_auto_height_layout( let overscroll = size(em_width, px(0.)); let editor_width = text_width - gutter_dimensions.margin - overscroll.width - em_width; - if !matches!(editor.soft_wrap_mode(cx), SoftWrap::None) { - if editor.set_wrap_width(Some(editor_width), cx) { - snapshot = editor.snapshot(window, cx); - } + if !matches!(editor.soft_wrap_mode(cx), SoftWrap::None) + && editor.set_wrap_width(Some(editor_width), cx) + { + snapshot = editor.snapshot(window, cx); } let scroll_height = (snapshot.max_point().row().next_row().0 as f32) * line_height; @@ -10085,6 +10190,71 @@ mod tests { use std::num::NonZeroU32; use util::test::sample_text; + #[gpui::test] + async fn test_soft_wrap_editor_width_auto_height_editor(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let window = cx.add_window(|window, cx| { + let buffer = MultiBuffer::build_simple(&"a ".to_string().repeat(100), cx); + let mut editor = Editor::new( + EditorMode::AutoHeight { + min_lines: 1, + max_lines: None, + }, + buffer, + None, + window, + cx, + ); + editor.set_soft_wrap_mode(language_settings::SoftWrap::EditorWidth, cx); + editor + }); + let cx = &mut VisualTestContext::from_window(*window, cx); + let editor = window.root(cx).unwrap(); + let style = cx.update(|_, cx| editor.read(cx).style().unwrap().clone()); + + for x in 1..=100 { + let (_, state) = cx.draw( + Default::default(), + size(px(200. + 0.13 * x as f32), px(500.)), + |_, _| EditorElement::new(&editor, style.clone()), + ); + + assert!( + state.position_map.scroll_max.x == 0., + "Soft wrapped editor should have no horizontal scrolling!" 
+ ); + } + } + + #[gpui::test] + async fn test_soft_wrap_editor_width_full_editor(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let window = cx.add_window(|window, cx| { + let buffer = MultiBuffer::build_simple(&"a ".to_string().repeat(100), cx); + let mut editor = Editor::new(EditorMode::full(), buffer, None, window, cx); + editor.set_soft_wrap_mode(language_settings::SoftWrap::EditorWidth, cx); + editor + }); + let cx = &mut VisualTestContext::from_window(*window, cx); + let editor = window.root(cx).unwrap(); + let style = cx.update(|_, cx| editor.read(cx).style().unwrap().clone()); + + for x in 1..=100 { + let (_, state) = cx.draw( + Default::default(), + size(px(200. + 0.13 * x as f32), px(500.)), + |_, _| EditorElement::new(&editor, style.clone()), + ); + + assert!( + state.position_map.scroll_max.x == 0., + "Soft wrapped editor should have no horizontal scrolling!" + ); + } + } + #[gpui::test] fn test_shape_line_numbers(cx: &mut TestAppContext) { init_test(cx, |_| {}); diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index fc350a5a15b4f7b105872e61e5a2401d183c1a6d..b11617ccec22f08737ba40ab257e19a81eddfd89 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -213,8 +213,8 @@ impl GitBlame { let project_subscription = cx.subscribe(&project, { let buffer = buffer.clone(); - move |this, _, event, cx| match event { - project::Event::WorktreeUpdatedEntries(_, updated) => { + move |this, _, event, cx| { + if let project::Event::WorktreeUpdatedEntries(_, updated) = event { let project_entry_id = buffer.read(cx).entry_id(cx); if updated .iter() @@ -224,7 +224,6 @@ impl GitBlame { this.generate(cx); } } - _ => {} } }); @@ -292,7 +291,7 @@ impl GitBlame { let buffer_id = self.buffer_snapshot.remote_id(); let mut cursor = self.entries.cursor::(&()); - rows.into_iter().map(move |info| { + rows.iter().map(move |info| { let row = info .buffer_row .filter(|_| info.buffer_id == Some(buffer_id))?; @@ -312,10 +311,10 @@ impl GitBlame { .as_ref() .and_then(|entry| entry.author.as_ref()) .map(|author| author.len()); - if let Some(author_len) = author_len { - if author_len > max_author_length { - max_author_length = author_len; - } + if let Some(author_len) = author_len + && author_len > max_author_length + { + max_author_length = author_len; } } @@ -415,21 +414,20 @@ impl GitBlame { let old_end = cursor.end(); if row_edits .peek() - .map_or(true, |next_edit| next_edit.old.start >= old_end) + .is_none_or(|next_edit| next_edit.old.start >= old_end) + && let Some(entry) = cursor.item() { - if let Some(entry) = cursor.item() { - if old_end > edit.old.end { - new_entries.push( - GitBlameEntry { - rows: cursor.end() - edit.old.end, - blame: entry.blame.clone(), - }, - &(), - ); - } - - cursor.next(); + if old_end > edit.old.end { + new_entries.push( + GitBlameEntry { + rows: cursor.end() - edit.old.end, + blame: entry.blame.clone(), + }, + &(), + ); } + + cursor.next(); } } new_entries.append(cursor.suffix(), &()); diff --git a/crates/editor/src/highlight_matching_bracket.rs b/crates/editor/src/highlight_matching_bracket.rs index e38197283d4a4e2623ecadb30d90d0363053fdc5..aa4e616924ad6bd47627bfd95e9a5c58587afc25 100644 --- a/crates/editor/src/highlight_matching_bracket.rs +++ b/crates/editor/src/highlight_matching_bracket.rs @@ -1,6 +1,7 @@ use crate::{Editor, RangeToAnchorExt}; -use gpui::{Context, Window}; +use gpui::{Context, HighlightStyle, Window}; use language::CursorShape; +use theme::ActiveTheme; enum MatchingBracketHighlight {} @@ -9,7 
+10,7 @@ pub fn refresh_matching_bracket_highlights( window: &mut Window, cx: &mut Context, ) { - editor.clear_background_highlights::(cx); + editor.clear_highlights::(cx); let newest_selection = editor.selections.newest::(cx); // Don't highlight brackets if the selection isn't empty @@ -35,12 +36,19 @@ pub fn refresh_matching_bracket_highlights( .buffer_snapshot .innermost_enclosing_bracket_ranges(head..tail, None) { - editor.highlight_background::( - &[ + editor.highlight_text::( + vec![ opening_range.to_anchors(&snapshot.buffer_snapshot), closing_range.to_anchors(&snapshot.buffer_snapshot), ], - |theme| theme.colors().editor_document_highlight_bracket_background, + HighlightStyle { + background_color: Some( + cx.theme() + .colors() + .editor_document_highlight_bracket_background, + ), + ..Default::default() + }, cx, ) } @@ -104,7 +112,7 @@ mod tests { another_test(1, 2, 3); } "#}); - cx.assert_editor_background_highlights::(indoc! {r#" + cx.assert_editor_text_highlights::(indoc! {r#" pub fn test«(»"Test argument"«)» { another_test(1, 2, 3); } @@ -115,7 +123,7 @@ mod tests { another_test(1, ˇ2, 3); } "#}); - cx.assert_editor_background_highlights::(indoc! {r#" + cx.assert_editor_text_highlights::(indoc! {r#" pub fn test("Test argument") { another_test«(»1, 2, 3«)»; } @@ -126,7 +134,7 @@ mod tests { anotherˇ_test(1, 2, 3); } "#}); - cx.assert_editor_background_highlights::(indoc! {r#" + cx.assert_editor_text_highlights::(indoc! {r#" pub fn test("Test argument") «{» another_test(1, 2, 3); «}» @@ -138,7 +146,7 @@ mod tests { another_test(1, 2, 3); } "#}); - cx.assert_editor_background_highlights::(indoc! {r#" + cx.assert_editor_text_highlights::(indoc! {r#" pub fn test("Test argument") { another_test(1, 2, 3); } @@ -150,8 +158,8 @@ mod tests { another_test(1, 2, 3); } "#}); - cx.assert_editor_background_highlights::(indoc! {r#" - pub fn test("Test argument") { + cx.assert_editor_text_highlights::(indoc! 
{r#" + pub fn test«("Test argument") { another_test(1, 2, 3); } "#}); diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index 02f93e6829a3f7ac08ec7dfa390cd846560bb7d5..94f49f601a101cd8ca2556df9ec1568b5e7337fa 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -271,7 +271,7 @@ impl Editor { Task::ready(Ok(Navigated::No)) }; self.select(SelectPhase::End, window, cx); - return navigate_task; + navigate_task } } @@ -321,7 +321,10 @@ pub fn update_inlay_link_and_hover_points( if let Some(cached_hint) = inlay_hint_cache.hint_by_id(excerpt_id, hovered_hint.id) { match cached_hint.resolve_state { ResolveState::CanResolve(_, _) => { - if let Some(buffer_id) = previous_valid_anchor.buffer_id { + if let Some(buffer_id) = snapshot + .buffer_snapshot + .buffer_id_for_anchor(previous_valid_anchor) + { inlay_hint_cache.spawn_hint_resolve( buffer_id, excerpt_id, @@ -418,24 +421,22 @@ pub fn update_inlay_link_and_hover_points( } if let Some((language_server_id, location)) = hovered_hint_part.location + && secondary_held + && !editor.has_pending_nonempty_selection() { - if secondary_held - && !editor.has_pending_nonempty_selection() - { - go_to_definition_updated = true; - show_link_definition( - shift_held, - editor, - TriggerPoint::InlayHint( - highlight, - location, - language_server_id, - ), - snapshot, - window, - cx, - ); - } + go_to_definition_updated = true; + show_link_definition( + shift_held, + editor, + TriggerPoint::InlayHint( + highlight, + location, + language_server_id, + ), + snapshot, + window, + cx, + ); } } } @@ -561,7 +562,7 @@ pub fn show_link_definition( provider.definitions(&buffer, buffer_position, preferred_kind, cx) })?; if let Some(task) = task { - task.await.ok().map(|definition_result| { + task.await.ok().flatten().map(|definition_result| { ( definition_result.iter().find_map(|link| { link.origin.as_ref().and_then(|origin| { @@ -657,11 +658,11 @@ pub fn show_link_definition( pub(crate) fn find_url( buffer: &Entity, position: text::Anchor, - mut cx: AsyncWindowContext, + cx: AsyncWindowContext, ) -> Option<(Range, String)> { const LIMIT: usize = 2048; - let Ok(snapshot) = buffer.read_with(&mut cx, |buffer, _| buffer.snapshot()) else { + let Ok(snapshot) = buffer.read_with(&cx, |buffer, _| buffer.snapshot()) else { return None; }; @@ -719,11 +720,11 @@ pub(crate) fn find_url( pub(crate) fn find_url_from_range( buffer: &Entity, range: Range, - mut cx: AsyncWindowContext, + cx: AsyncWindowContext, ) -> Option { const LIMIT: usize = 2048; - let Ok(snapshot) = buffer.read_with(&mut cx, |buffer, _| buffer.snapshot()) else { + let Ok(snapshot) = buffer.read_with(&cx, |buffer, _| buffer.snapshot()) else { return None; }; @@ -766,10 +767,11 @@ pub(crate) fn find_url_from_range( let mut finder = LinkFinder::new(); finder.kinds(&[LinkKind::Url]); - if let Some(link) = finder.links(&text).next() { - if link.start() == 0 && link.end() == text.len() { - return Some(link.as_str().to_string()); - } + if let Some(link) = finder.links(&text).next() + && link.start() == 0 + && link.end() == text.len() + { + return Some(link.as_str().to_string()); } None @@ -794,7 +796,7 @@ pub(crate) async fn find_file( ) -> Option { project .update(cx, |project, cx| { - project.resolve_path_in_buffer(&candidate_file_path, buffer, cx) + project.resolve_path_in_buffer(candidate_file_path, buffer, cx) }) .ok()? 
.await @@ -872,7 +874,7 @@ fn surrounding_filename( .peekable(); while let Some(ch) = forwards.next() { // Skip escaped whitespace - if ch == '\\' && forwards.peek().map_or(false, |ch| ch.is_whitespace()) { + if ch == '\\' && forwards.peek().is_some_and(|ch| ch.is_whitespace()) { token_end += ch.len_utf8(); let whitespace = forwards.next().unwrap(); token_end += whitespace.len_utf8(); diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index bda229e34669482549182b2c7abbe2c3efb9a751..fab53457876866223be6b7d32f964cd1abd1dd28 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -142,11 +142,11 @@ pub fn hover_at_inlay( .info_popovers .iter() .any(|InfoPopover { symbol_range, .. }| { - if let RangeInEditor::Inlay(range) = symbol_range { - if range == &inlay_hover.range { - // Hover triggered from same location as last time. Don't show again. - return true; - } + if let RangeInEditor::Inlay(range) = symbol_range + && range == &inlay_hover.range + { + // Hover triggered from same location as last time. Don't show again. + return true; } false }) @@ -167,17 +167,16 @@ pub fn hover_at_inlay( let language_registry = project.read_with(cx, |p, _| p.languages().clone())?; let blocks = vec![inlay_hover.tooltip]; - let parsed_content = parse_blocks(&blocks, &language_registry, None, cx).await; + let parsed_content = + parse_blocks(&blocks, Some(&language_registry), None, cx).await; let scroll_handle = ScrollHandle::new(); let subscription = this .update(cx, |_, cx| { - if let Some(parsed_content) = &parsed_content { - Some(cx.observe(parsed_content, |_, _, cx| cx.notify())) - } else { - None - } + parsed_content.as_ref().map(|parsed_content| { + cx.observe(parsed_content, |_, _, cx| cx.notify()) + }) }) .ok() .flatten(); @@ -251,7 +250,9 @@ fn show_hover( let (excerpt_id, _, _) = editor.buffer().read(cx).excerpt_containing(anchor, cx)?; - let language_registry = editor.project.as_ref()?.read(cx).languages().clone(); + let language_registry = editor + .project() + .map(|project| project.read(cx).languages().clone()); let provider = editor.semantics_provider.clone()?; if !ignore_timeout { @@ -267,13 +268,12 @@ fn show_hover( } // Don't request again if the location is the same as the previous request - if let Some(triggered_from) = &editor.hover_state.triggered_from { - if triggered_from + if let Some(triggered_from) = &editor.hover_state.triggered_from + && triggered_from .cmp(&anchor, &snapshot.buffer_snapshot) .is_eq() - { - return None; - } + { + return None; } let hover_popover_delay = EditorSettings::get_global(cx).hover_popover_delay; @@ -428,7 +428,7 @@ fn show_hover( }; let hovers_response = if let Some(hover_request) = hover_request { - hover_request.await + hover_request.await.unwrap_or_default() } else { Vec::new() }; @@ -443,15 +443,14 @@ fn show_hover( text: format!("Unicode character U+{:02X}", invisible as u32), kind: HoverBlockKind::PlainText, }]; - let parsed_content = parse_blocks(&blocks, &language_registry, None, cx).await; + let parsed_content = + parse_blocks(&blocks, language_registry.as_ref(), None, cx).await; let scroll_handle = ScrollHandle::new(); let subscription = this .update(cx, |_, cx| { - if let Some(parsed_content) = &parsed_content { - Some(cx.observe(parsed_content, |_, _, cx| cx.notify())) - } else { - None - } + parsed_content.as_ref().map(|parsed_content| { + cx.observe(parsed_content, |_, _, cx| cx.notify()) + }) }) .ok() .flatten(); @@ -493,16 +492,15 @@ fn show_hover( let blocks = 
hover_result.contents; let language = hover_result.language; - let parsed_content = parse_blocks(&blocks, &language_registry, language, cx).await; + let parsed_content = + parse_blocks(&blocks, language_registry.as_ref(), language, cx).await; let scroll_handle = ScrollHandle::new(); hover_highlights.push(range.clone()); let subscription = this .update(cx, |_, cx| { - if let Some(parsed_content) = &parsed_content { - Some(cx.observe(parsed_content, |_, _, cx| cx.notify())) - } else { - None - } + parsed_content.as_ref().map(|parsed_content| { + cx.observe(parsed_content, |_, _, cx| cx.notify()) + }) }) .ok() .flatten(); @@ -583,7 +581,7 @@ fn same_diagnostic_hover(editor: &Editor, snapshot: &EditorSnapshot, anchor: Anc async fn parse_blocks( blocks: &[HoverBlock], - language_registry: &Arc, + language_registry: Option<&Arc>, language: Option>, cx: &mut AsyncWindowContext, ) -> Option> { @@ -599,18 +597,15 @@ async fn parse_blocks( }) .join("\n\n"); - let rendered_block = cx - .new_window_entity(|_window, cx| { - Markdown::new( - combined_text.into(), - Some(language_registry.clone()), - language.map(|language| language.name()), - cx, - ) - }) - .ok(); - - rendered_block + cx.new_window_entity(|_window, cx| { + Markdown::new( + combined_text.into(), + language_registry.cloned(), + language.map(|language| language.name()), + cx, + ) + }) + .ok() } pub fn hover_markdown_style(window: &Window, cx: &App) -> MarkdownStyle { @@ -622,7 +617,7 @@ pub fn hover_markdown_style(window: &Window, cx: &App) -> MarkdownStyle { let mut base_text_style = window.text_style(); base_text_style.refine(&TextStyleRefinement { - font_family: Some(ui_font_family.clone()), + font_family: Some(ui_font_family), font_fallbacks: ui_font_fallbacks, color: Some(cx.theme().colors().editor_foreground), ..Default::default() @@ -671,7 +666,7 @@ pub fn diagnostics_markdown_style(window: &Window, cx: &App) -> MarkdownStyle { let mut base_text_style = window.text_style(); base_text_style.refine(&TextStyleRefinement { - font_family: Some(ui_font_family.clone()), + font_family: Some(ui_font_family), font_fallbacks: ui_font_fallbacks, color: Some(cx.theme().colors().editor_foreground), ..Default::default() @@ -712,59 +707,54 @@ pub fn diagnostics_markdown_style(window: &Window, cx: &App) -> MarkdownStyle { } pub fn open_markdown_url(link: SharedString, window: &mut Window, cx: &mut App) { - if let Ok(uri) = Url::parse(&link) { - if uri.scheme() == "file" { - if let Some(workspace) = window.root::().flatten() { - workspace.update(cx, |workspace, cx| { - let task = workspace.open_abs_path( - PathBuf::from(uri.path()), - OpenOptions { - visible: Some(OpenVisible::None), - ..Default::default() - }, - window, - cx, - ); + if let Ok(uri) = Url::parse(&link) + && uri.scheme() == "file" + && let Some(workspace) = window.root::().flatten() + { + workspace.update(cx, |workspace, cx| { + let task = workspace.open_abs_path( + PathBuf::from(uri.path()), + OpenOptions { + visible: Some(OpenVisible::None), + ..Default::default() + }, + window, + cx, + ); - cx.spawn_in(window, async move |_, cx| { - let item = task.await?; - // Ruby LSP uses URLs with #L1,1-4,4 - // we'll just take the first number and assume it's a line number - let Some(fragment) = uri.fragment() else { - return anyhow::Ok(()); - }; - let mut accum = 0u32; - for c in fragment.chars() { - if c >= '0' && c <= '9' && accum < u32::MAX / 2 { - accum *= 10; - accum += c as u32 - '0' as u32; - } else if accum > 0 { - break; - } - } - if accum == 0 { - return Ok(()); - } - let 
Some(editor) = cx.update(|_, cx| item.act_as::(cx))? else { - return Ok(()); - }; - editor.update_in(cx, |editor, window, cx| { - editor.change_selections( - Default::default(), - window, - cx, - |selections| { - selections.select_ranges([text::Point::new(accum - 1, 0) - ..text::Point::new(accum - 1, 0)]); - }, - ); - }) - }) - .detach_and_log_err(cx); - }); - return; - } - } + cx.spawn_in(window, async move |_, cx| { + let item = task.await?; + // Ruby LSP uses URLs with #L1,1-4,4 + // we'll just take the first number and assume it's a line number + let Some(fragment) = uri.fragment() else { + return anyhow::Ok(()); + }; + let mut accum = 0u32; + for c in fragment.chars() { + if c >= '0' && c <= '9' && accum < u32::MAX / 2 { + accum *= 10; + accum += c as u32 - '0' as u32; + } else if accum > 0 { + break; + } + } + if accum == 0 { + return Ok(()); + } + let Some(editor) = cx.update(|_, cx| item.act_as::(cx))? else { + return Ok(()); + }; + editor.update_in(cx, |editor, window, cx| { + editor.change_selections(Default::default(), window, cx, |selections| { + selections.select_ranges([ + text::Point::new(accum - 1, 0)..text::Point::new(accum - 1, 0) + ]); + }); + }) + }) + .detach_and_log_err(cx); + }); + return; } cx.open_url(&link); } @@ -834,20 +824,19 @@ impl HoverState { pub fn focused(&self, window: &mut Window, cx: &mut Context) -> bool { let mut hover_popover_is_focused = false; for info_popover in &self.info_popovers { - if let Some(markdown_view) = &info_popover.parsed_content { - if markdown_view.focus_handle(cx).is_focused(window) { - hover_popover_is_focused = true; - } + if let Some(markdown_view) = &info_popover.parsed_content + && markdown_view.focus_handle(cx).is_focused(window) + { + hover_popover_is_focused = true; } } - if let Some(diagnostic_popover) = &self.diagnostic_popover { - if diagnostic_popover + if let Some(diagnostic_popover) = &self.diagnostic_popover + && diagnostic_popover .markdown .focus_handle(cx) .is_focused(window) - { - hover_popover_is_focused = true; - } + { + hover_popover_is_focused = true; } hover_popover_is_focused } diff --git a/crates/editor/src/indent_guides.rs b/crates/editor/src/indent_guides.rs index f6d51c929a95ac3d256095b627c303a6c49a64a5..23717eeb158cea0f01e6a4efca6d2ff14a8fa824 100644 --- a/crates/editor/src/indent_guides.rs +++ b/crates/editor/src/indent_guides.rs @@ -164,15 +164,15 @@ pub fn indent_guides_in_range( let end_anchor = snapshot.buffer_snapshot.anchor_after(end_offset); let mut fold_ranges = Vec::>::new(); - let mut folds = snapshot.folds_in_range(start_offset..end_offset).peekable(); - while let Some(fold) = folds.next() { + let folds = snapshot.folds_in_range(start_offset..end_offset).peekable(); + for fold in folds { let start = fold.range.start.to_point(&snapshot.buffer_snapshot); let end = fold.range.end.to_point(&snapshot.buffer_snapshot); - if let Some(last_range) = fold_ranges.last_mut() { - if last_range.end >= start { - last_range.end = last_range.end.max(end); - continue; - } + if let Some(last_range) = fold_ranges.last_mut() + && last_range.end >= start + { + last_range.end = last_range.end.max(end); + continue; } fold_ranges.push(start..end); } diff --git a/crates/editor/src/inlay_hint_cache.rs b/crates/editor/src/inlay_hint_cache.rs index 60ad0e5bf6c5672a3ce651793b8f76a82ab4c0ff..dbf5ac95b78433c9a67da110e804a8973e51dee1 100644 --- a/crates/editor/src/inlay_hint_cache.rs +++ b/crates/editor/src/inlay_hint_cache.rs @@ -475,10 +475,7 @@ impl InlayHintCache { let excerpt_cached_hints = 
excerpt_cached_hints.read(); let mut excerpt_cache = excerpt_cached_hints.ordered_hints.iter().fuse().peekable(); shown_excerpt_hints_to_remove.retain(|(shown_anchor, shown_hint_id)| { - let Some(buffer) = shown_anchor - .buffer_id - .and_then(|buffer_id| multi_buffer.buffer(buffer_id)) - else { + let Some(buffer) = multi_buffer.buffer_for_anchor(*shown_anchor, cx) else { return false; }; let buffer_snapshot = buffer.read(cx).snapshot(); @@ -498,16 +495,14 @@ impl InlayHintCache { cmp::Ordering::Less | cmp::Ordering::Equal => { if !old_kinds.contains(&cached_hint.kind) && new_kinds.contains(&cached_hint.kind) - { - if let Some(anchor) = multi_buffer_snapshot + && let Some(anchor) = multi_buffer_snapshot .anchor_in_excerpt(*excerpt_id, cached_hint.position) - { - to_insert.push(Inlay::hint( - cached_hint_id.id(), - anchor, - cached_hint, - )); - } + { + to_insert.push(Inlay::hint( + cached_hint_id.id(), + anchor, + cached_hint, + )); } excerpt_cache.next(); } @@ -522,16 +517,16 @@ impl InlayHintCache { for cached_hint_id in excerpt_cache { let maybe_missed_cached_hint = &excerpt_cached_hints.hints_by_id[cached_hint_id]; let cached_hint_kind = maybe_missed_cached_hint.kind; - if !old_kinds.contains(&cached_hint_kind) && new_kinds.contains(&cached_hint_kind) { - if let Some(anchor) = multi_buffer_snapshot + if !old_kinds.contains(&cached_hint_kind) + && new_kinds.contains(&cached_hint_kind) + && let Some(anchor) = multi_buffer_snapshot .anchor_in_excerpt(*excerpt_id, maybe_missed_cached_hint.position) - { - to_insert.push(Inlay::hint( - cached_hint_id.id(), - anchor, - maybe_missed_cached_hint, - )); - } + { + to_insert.push(Inlay::hint( + cached_hint_id.id(), + anchor, + maybe_missed_cached_hint, + )); } } } @@ -620,44 +615,44 @@ impl InlayHintCache { ) { if let Some(excerpt_hints) = self.hints.get(&excerpt_id) { let mut guard = excerpt_hints.write(); - if let Some(cached_hint) = guard.hints_by_id.get_mut(&id) { - if let ResolveState::CanResolve(server_id, _) = &cached_hint.resolve_state { - let hint_to_resolve = cached_hint.clone(); - let server_id = *server_id; - cached_hint.resolve_state = ResolveState::Resolving; - drop(guard); - cx.spawn_in(window, async move |editor, cx| { - let resolved_hint_task = editor.update(cx, |editor, cx| { - let buffer = editor.buffer().read(cx).buffer(buffer_id)?; - editor.semantics_provider.as_ref()?.resolve_inlay_hint( - hint_to_resolve, - buffer, - server_id, - cx, - ) - })?; - if let Some(resolved_hint_task) = resolved_hint_task { - let mut resolved_hint = - resolved_hint_task.await.context("hint resolve task")?; - editor.read_with(cx, |editor, _| { - if let Some(excerpt_hints) = - editor.inlay_hint_cache.hints.get(&excerpt_id) + if let Some(cached_hint) = guard.hints_by_id.get_mut(&id) + && let ResolveState::CanResolve(server_id, _) = &cached_hint.resolve_state + { + let hint_to_resolve = cached_hint.clone(); + let server_id = *server_id; + cached_hint.resolve_state = ResolveState::Resolving; + drop(guard); + cx.spawn_in(window, async move |editor, cx| { + let resolved_hint_task = editor.update(cx, |editor, cx| { + let buffer = editor.buffer().read(cx).buffer(buffer_id)?; + editor.semantics_provider.as_ref()?.resolve_inlay_hint( + hint_to_resolve, + buffer, + server_id, + cx, + ) + })?; + if let Some(resolved_hint_task) = resolved_hint_task { + let mut resolved_hint = + resolved_hint_task.await.context("hint resolve task")?; + editor.read_with(cx, |editor, _| { + if let Some(excerpt_hints) = + editor.inlay_hint_cache.hints.get(&excerpt_id) + { + let mut 
guard = excerpt_hints.write(); + if let Some(cached_hint) = guard.hints_by_id.get_mut(&id) + && cached_hint.resolve_state == ResolveState::Resolving { - let mut guard = excerpt_hints.write(); - if let Some(cached_hint) = guard.hints_by_id.get_mut(&id) { - if cached_hint.resolve_state == ResolveState::Resolving { - resolved_hint.resolve_state = ResolveState::Resolved; - *cached_hint = resolved_hint; - } - } + resolved_hint.resolve_state = ResolveState::Resolved; + *cached_hint = resolved_hint; } - })?; - } + } + })?; + } - anyhow::Ok(()) - }) - .detach_and_log_err(cx); - } + anyhow::Ok(()) + }) + .detach_and_log_err(cx); } } } @@ -990,8 +985,8 @@ fn fetch_and_update_hints( let buffer = editor.buffer().read(cx).buffer(query.buffer_id)?; - if !editor.registered_buffers.contains_key(&query.buffer_id) { - if let Some(project) = editor.project.as_ref() { + if !editor.registered_buffers.contains_key(&query.buffer_id) + && let Some(project) = editor.project.as_ref() { project.update(cx, |project, cx| { editor.registered_buffers.insert( query.buffer_id, @@ -999,7 +994,6 @@ fn fetch_and_update_hints( ); }) } - } editor .semantics_provider @@ -1240,14 +1234,12 @@ fn apply_hint_update( .inlay_hint_cache .allowed_hint_kinds .contains(&new_hint.kind) - { - if let Some(new_hint_position) = + && let Some(new_hint_position) = multi_buffer_snapshot.anchor_in_excerpt(query.excerpt_id, new_hint.position) - { - splice - .to_insert - .push(Inlay::hint(new_inlay_id, new_hint_position, &new_hint)); - } + { + splice + .to_insert + .push(Inlay::hint(new_inlay_id, new_hint_position, &new_hint)); } let new_id = InlayId::Hint(new_inlay_id); cached_excerpt_hints.hints_by_id.insert(new_id, new_hint); diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index ca635a2132790e809258c8bd63fbd3a1c3edcdb3..afc5767de010d67bdbe3e6fd21e1ffcfe840b801 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -1,7 +1,7 @@ use crate::{ Anchor, Autoscroll, Editor, EditorEvent, EditorSettings, ExcerptId, ExcerptRange, FormatTarget, - MultiBuffer, MultiBufferSnapshot, NavigationData, SearchWithinRange, SelectionEffects, - ToPoint as _, + MultiBuffer, MultiBufferSnapshot, NavigationData, ReportEditorEvent, SearchWithinRange, + SelectionEffects, ToPoint as _, display_map::HighlightKey, editor_settings::SeedQuerySetting, persistence::{DB, SerializedEditor}, @@ -103,9 +103,9 @@ impl FollowableItem for Editor { multibuffer = MultiBuffer::new(project.read(cx).capability()); let mut sorted_excerpts = state.excerpts.clone(); sorted_excerpts.sort_by_key(|e| e.id); - let mut sorted_excerpts = sorted_excerpts.into_iter().peekable(); + let sorted_excerpts = sorted_excerpts.into_iter().peekable(); - while let Some(excerpt) = sorted_excerpts.next() { + for excerpt in sorted_excerpts { let Ok(buffer_id) = BufferId::new(excerpt.buffer_id) else { continue; }; @@ -201,7 +201,7 @@ impl FollowableItem for Editor { if buffer .as_singleton() .and_then(|buffer| buffer.read(cx).file()) - .map_or(false, |file| file.is_private()) + .is_some_and(|file| file.is_private()) { return None; } @@ -293,7 +293,7 @@ impl FollowableItem for Editor { EditorEvent::ExcerptsRemoved { ids, .. } => { update .deleted_excerpts - .extend(ids.iter().map(ExcerptId::to_proto)); + .extend(ids.iter().copied().map(ExcerptId::to_proto)); true } EditorEvent::ScrollPositionChanged { autoscroll, .. 
} if !autoscroll => { @@ -524,8 +524,8 @@ fn serialize_selection( ) -> proto::Selection { proto::Selection { id: selection.id as u64, - start: Some(serialize_anchor(&selection.start, &buffer)), - end: Some(serialize_anchor(&selection.end, &buffer)), + start: Some(serialize_anchor(&selection.start, buffer)), + end: Some(serialize_anchor(&selection.end, buffer)), reversed: selection.reversed, } } @@ -654,6 +654,10 @@ impl Item for Editor { } } + fn suggested_filename(&self, cx: &App) -> SharedString { + self.buffer.read(cx).title(cx).to_string().into() + } + fn tab_icon(&self, _: &Window, cx: &App) -> Option { ItemSettings::get_global(cx) .file_icons @@ -674,7 +678,7 @@ impl Item for Editor { let buffer = buffer.read(cx); let path = buffer.project_path(cx)?; let buffer_id = buffer.remote_id(); - let project = self.project.as_ref()?.read(cx); + let project = self.project()?.read(cx); let entry = project.entry_for_path(&path, cx)?; let (repo, repo_path) = project .git_store() @@ -711,7 +715,7 @@ impl Item for Editor { .read(cx) .as_singleton() .and_then(|buffer| buffer.read(cx).file()) - .map_or(false, |file| file.disk_state() == DiskState::Deleted); + .is_some_and(|file| file.disk_state() == DiskState::Deleted); h_flex() .gap_2() @@ -776,6 +780,10 @@ impl Item for Editor { } } + fn on_removed(&self, cx: &App) { + self.report_editor_event(ReportEditorEvent::Closed, None, cx); + } + fn deactivated(&mut self, _: &mut Window, cx: &mut Context) { let selection = self.selections.newest_anchor(); self.push_to_nav_history(selection.head(), None, true, false, cx); @@ -815,9 +823,9 @@ impl Item for Editor { ) -> Task> { // Add meta data tracking # of auto saves if options.autosave { - self.report_editor_event("Editor Autosaved", None, cx); + self.report_editor_event(ReportEditorEvent::Saved { auto_saved: true }, None, cx); } else { - self.report_editor_event("Editor Saved", None, cx); + self.report_editor_event(ReportEditorEvent::Saved { auto_saved: false }, None, cx); } let buffers = self.buffer().clone().read(cx).all_buffers(); @@ -896,7 +904,11 @@ impl Item for Editor { .path .extension() .map(|a| a.to_string_lossy().to_string()); - self.report_editor_event("Editor Saved", file_extension, cx); + self.report_editor_event( + ReportEditorEvent::Saved { auto_saved: false }, + file_extension, + cx, + ); project.update(cx, |project, cx| project.save_buffer_as(buffer, path, cx)) } @@ -918,10 +930,10 @@ impl Item for Editor { })?; buffer .update(cx, |buffer, cx| { - if let Some(transaction) = transaction { - if !buffer.is_singleton() { - buffer.push_transaction(&transaction.0, cx); - } + if let Some(transaction) = transaction + && !buffer.is_singleton() + { + buffer.push_transaction(&transaction.0, cx); } }) .ok(); @@ -997,8 +1009,8 @@ impl Item for Editor { ) { self.workspace = Some((workspace.weak_handle(), workspace.database_id())); if let Some(workspace) = &workspace.weak_handle().upgrade() { - cx.subscribe(&workspace, |editor, _, event: &workspace::Event, _cx| { - if matches!(event, workspace::Event::ModalOpened) { + cx.subscribe(workspace, |editor, _, event: &workspace::Event, _cx| { + if let workspace::Event::ModalOpened = event { editor.mouse_context_menu.take(); editor.inline_blame_popover.take(); } @@ -1024,6 +1036,10 @@ impl Item for Editor { f(ItemEvent::UpdateBreadcrumbs); } + EditorEvent::BreadcrumbsChanged => { + f(ItemEvent::UpdateBreadcrumbs); + } + EditorEvent::DirtyChanged => { f(ItemEvent::UpdateTab); } @@ -1276,7 +1292,7 @@ impl SerializableItem for Editor { project .read(cx) 
.worktree_for_id(worktree_id, cx) - .and_then(|worktree| worktree.read(cx).absolutize(&file.path()).ok()) + .and_then(|worktree| worktree.read(cx).absolutize(file.path()).ok()) .or_else(|| { let full_path = file.full_path(cx); let project_path = project.read(cx).find_project_path(&full_path, cx)?; @@ -1354,36 +1370,33 @@ impl ProjectItem for Editor { let mut editor = Self::for_buffer(buffer.clone(), Some(project), window, cx); if let Some((excerpt_id, buffer_id, snapshot)) = editor.buffer().read(cx).snapshot(cx).as_singleton() + && WorkspaceSettings::get(None, cx).restore_on_file_reopen + && let Some(restoration_data) = Self::project_item_kind() + .and_then(|kind| pane.as_ref()?.project_item_restoration_data.get(&kind)) + .and_then(|data| data.downcast_ref::()) + .and_then(|data| { + let file = project::File::from_dyn(buffer.read(cx).file())?; + data.entries.get(&file.abs_path(cx)) + }) { - if WorkspaceSettings::get(None, cx).restore_on_file_reopen { - if let Some(restoration_data) = Self::project_item_kind() - .and_then(|kind| pane.as_ref()?.project_item_restoration_data.get(&kind)) - .and_then(|data| data.downcast_ref::()) - .and_then(|data| { - let file = project::File::from_dyn(buffer.read(cx).file())?; - data.entries.get(&file.abs_path(cx)) - }) - { - editor.fold_ranges( - clip_ranges(&restoration_data.folds, &snapshot), - false, - window, - cx, - ); - if !restoration_data.selections.is_empty() { - editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges(clip_ranges(&restoration_data.selections, &snapshot)); - }); - } - let (top_row, offset) = restoration_data.scroll_position; - let anchor = Anchor::in_buffer( - *excerpt_id, - buffer_id, - snapshot.anchor_before(Point::new(top_row, 0)), - ); - editor.set_scroll_anchor(ScrollAnchor { anchor, offset }, window, cx); - } + editor.fold_ranges( + clip_ranges(&restoration_data.folds, snapshot), + false, + window, + cx, + ); + if !restoration_data.selections.is_empty() { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges(clip_ranges(&restoration_data.selections, snapshot)); + }); } + let (top_row, offset) = restoration_data.scroll_position; + let anchor = Anchor::in_buffer( + *excerpt_id, + buffer_id, + snapshot.anchor_before(Point::new(top_row, 0)), + ); + editor.set_scroll_anchor(ScrollAnchor { anchor, offset }, window, cx); } editor @@ -1825,7 +1838,7 @@ pub fn entry_diagnostic_aware_icon_name_and_color( diagnostic_severity: Option, ) -> Option<(IconName, Color)> { match diagnostic_severity { - Some(DiagnosticSeverity::ERROR) => Some((IconName::X, Color::Error)), + Some(DiagnosticSeverity::ERROR) => Some((IconName::Close, Color::Error)), Some(DiagnosticSeverity::WARNING) => Some((IconName::Triangle, Color::Warning)), _ => None, } diff --git a/crates/editor/src/jsx_tag_auto_close.rs b/crates/editor/src/jsx_tag_auto_close.rs index 95a792583953e02a77e592ea957b752f0f8042bb..e6c518beae3ecf3741b5f74be6087628f5231c8c 100644 --- a/crates/editor/src/jsx_tag_auto_close.rs +++ b/crates/editor/src/jsx_tag_auto_close.rs @@ -37,7 +37,7 @@ pub(crate) fn should_auto_close( let text = buffer .text_for_range(edited_range.clone()) .collect::(); - let edited_range = edited_range.to_offset(&buffer); + let edited_range = edited_range.to_offset(buffer); if !text.ends_with(">") { continue; } @@ -51,12 +51,11 @@ pub(crate) fn should_auto_close( continue; }; let mut jsx_open_tag_node = node; - if node.grammar_name() != config.open_tag_node_name { - if let Some(parent) = node.parent() { - 
if parent.grammar_name() == config.open_tag_node_name { - jsx_open_tag_node = parent; - } - } + if node.grammar_name() != config.open_tag_node_name + && let Some(parent) = node.parent() + && parent.grammar_name() == config.open_tag_node_name + { + jsx_open_tag_node = parent; } if jsx_open_tag_node.grammar_name() != config.open_tag_node_name { continue; @@ -87,9 +86,9 @@ pub(crate) fn should_auto_close( }); } if to_auto_edit.is_empty() { - return None; + None } else { - return Some(to_auto_edit); + Some(to_auto_edit) } } @@ -182,12 +181,12 @@ pub(crate) fn generate_auto_close_edits( */ { let tag_node_name_equals = |node: &Node, name: &str| { - let is_empty = name.len() == 0; + let is_empty = name.is_empty(); if let Some(node_name) = node.named_child(TS_NODE_TAG_NAME_CHILD_INDEX) { let range = node_name.byte_range(); return buffer.text_for_range(range).equals_str(name); } - return is_empty; + is_empty }; let tree_root_node = { @@ -208,7 +207,7 @@ pub(crate) fn generate_auto_close_edits( cur = descendant; } - assert!(ancestors.len() > 0); + assert!(!ancestors.is_empty()); let mut tree_root_node = open_tag; @@ -228,7 +227,7 @@ pub(crate) fn generate_auto_close_edits( let has_open_tag_with_same_tag_name = ancestor .named_child(0) .filter(|n| n.kind() == config.open_tag_node_name) - .map_or(false, |element_open_tag_node| { + .is_some_and(|element_open_tag_node| { tag_node_name_equals(&element_open_tag_node, &tag_name) }); if has_open_tag_with_same_tag_name { @@ -264,8 +263,7 @@ pub(crate) fn generate_auto_close_edits( } let is_after_open_tag = |node: &Node| { - return node.start_byte() < open_tag.start_byte() - && node.end_byte() < open_tag.start_byte(); + node.start_byte() < open_tag.start_byte() && node.end_byte() < open_tag.start_byte() }; // perf: use cursor for more efficient traversal @@ -284,10 +282,8 @@ pub(crate) fn generate_auto_close_edits( unclosed_open_tag_count -= 1; } } else if has_erroneous_close_tag && kind == erroneous_close_tag_node_name { - if tag_node_name_equals(&node, &tag_name) { - if !is_after_open_tag(&node) { - unclosed_open_tag_count -= 1; - } + if tag_node_name_equals(&node, &tag_name) && !is_after_open_tag(&node) { + unclosed_open_tag_count -= 1; } } else if kind == config.jsx_element_node_name { // perf: filter only open,close,element,erroneous nodes @@ -304,7 +300,7 @@ pub(crate) fn generate_auto_close_edits( let edit_range = edit_anchor..edit_anchor; edits.push((edit_range, format!("", tag_name))); } - return Ok(edits); + Ok(edits) } pub(crate) fn refresh_enabled_in_any_buffer( @@ -370,7 +366,7 @@ pub(crate) fn construct_initial_buffer_versions_map< initial_buffer_versions.insert(buffer_id, buffer_version); } } - return initial_buffer_versions; + initial_buffer_versions } pub(crate) fn handle_from( @@ -458,12 +454,9 @@ pub(crate) fn handle_from( let ensure_no_edits_since_start = || -> Option<()> { let has_edits_since_start = this .read_with(cx, |this, cx| { - this.buffer - .read(cx) - .buffer(buffer_id) - .map_or(true, |buffer| { - buffer.read(cx).has_edits_since(&buffer_version_initial) - }) + this.buffer.read(cx).buffer(buffer_id).is_none_or(|buffer| { + buffer.read(cx).has_edits_since(&buffer_version_initial) + }) }) .ok()?; @@ -514,7 +507,7 @@ pub(crate) fn handle_from( { let selections = this - .read_with(cx, |this, _| this.selections.disjoint_anchors().clone()) + .read_with(cx, |this, _| this.selections.disjoint_anchors()) .ok()?; for selection in selections.iter() { let Some(selection_buffer_offset_head) = @@ -815,10 +808,7 @@ mod jsx_tag_autoclose_tests { 
); buf }); - let buffer_c = cx.new(|cx| { - let buf = language::Buffer::local("(cx) .into_iter() .any(|s| !s.is_empty()); - let has_git_repo = anchor.buffer_id.is_some_and(|buffer_id| { - project - .read(cx) - .git_store() - .read(cx) - .repository_and_path_for_buffer_id(buffer_id, cx) - .is_some() - }); + let has_git_repo = buffer + .buffer_id_for_anchor(anchor) + .is_some_and(|buffer_id| { + project + .read(cx) + .git_store() + .read(cx) + .repository_and_path_for_buffer_id(buffer_id, cx) + .is_some() + }); let evaluate_selection = window.is_action_available(&EvaluateSelectedText, cx); let run_to_cursor = window.is_action_available(&RunToCursor, cx); diff --git a/crates/editor/src/movement.rs b/crates/editor/src/movement.rs index a8850984a191be89400097d20c0992e3664aff44..7a008e3ba257173429448a02be7abe5ab00cedec 100644 --- a/crates/editor/src/movement.rs +++ b/crates/editor/src/movement.rs @@ -230,7 +230,7 @@ pub fn indented_line_beginning( if stop_at_soft_boundaries && soft_line_start > indent_start && display_point != soft_line_start { soft_line_start - } else if stop_at_indent && display_point != indent_start { + } else if stop_at_indent && (display_point > indent_start || display_point == line_start) { indent_start } else { line_start @@ -439,17 +439,17 @@ pub fn start_of_excerpt( }; match direction { Direction::Prev => { - let mut start = excerpt.start_anchor().to_display_point(&map); + let mut start = excerpt.start_anchor().to_display_point(map); if start >= display_point && start.row() > DisplayRow(0) { let Some(excerpt) = map.buffer_snapshot.excerpt_before(excerpt.id()) else { return display_point; }; - start = excerpt.start_anchor().to_display_point(&map); + start = excerpt.start_anchor().to_display_point(map); } start } Direction::Next => { - let mut end = excerpt.end_anchor().to_display_point(&map); + let mut end = excerpt.end_anchor().to_display_point(map); *end.row_mut() += 1; map.clip_point(end, Bias::Right) } @@ -467,7 +467,7 @@ pub fn end_of_excerpt( }; match direction { Direction::Prev => { - let mut start = excerpt.start_anchor().to_display_point(&map); + let mut start = excerpt.start_anchor().to_display_point(map); if start.row() > DisplayRow(0) { *start.row_mut() -= 1; } @@ -476,7 +476,7 @@ pub fn end_of_excerpt( start } Direction::Next => { - let mut end = excerpt.end_anchor().to_display_point(&map); + let mut end = excerpt.end_anchor().to_display_point(map); *end.column_mut() = 0; if end <= display_point { *end.row_mut() += 1; @@ -485,7 +485,7 @@ pub fn end_of_excerpt( else { return display_point; }; - end = excerpt.end_anchor().to_display_point(&map); + end = excerpt.end_anchor().to_display_point(map); *end.column_mut() = 0; } end @@ -510,10 +510,10 @@ pub fn find_preceding_boundary_point( if find_range == FindRange::SingleLine && ch == '\n' { break; } - if let Some(prev_ch) = prev_ch { - if is_boundary(ch, prev_ch) { - break; - } + if let Some(prev_ch) = prev_ch + && is_boundary(ch, prev_ch) + { + break; } offset -= ch.len_utf8(); @@ -562,13 +562,13 @@ pub fn find_boundary_point( if find_range == FindRange::SingleLine && ch == '\n' { break; } - if let Some(prev_ch) = prev_ch { - if is_boundary(prev_ch, ch) { - if return_point_before_boundary { - return map.clip_point(prev_offset.to_display_point(map), Bias::Right); - } else { - break; - } + if let Some(prev_ch) = prev_ch + && is_boundary(prev_ch, ch) + { + if return_point_before_boundary { + return map.clip_point(prev_offset.to_display_point(map), Bias::Right); + } else { + break; } } prev_offset = offset; @@ 
-603,13 +603,13 @@ pub fn find_preceding_boundary_trail( // Find the boundary let start_offset = offset; for ch in forward { - if let Some(prev_ch) = prev_ch { - if is_boundary(prev_ch, ch) { - if start_offset == offset { - trail_offset = Some(offset); - } else { - break; - } + if let Some(prev_ch) = prev_ch + && is_boundary(prev_ch, ch) + { + if start_offset == offset { + trail_offset = Some(offset); + } else { + break; } } offset -= ch.len_utf8(); @@ -651,13 +651,13 @@ pub fn find_boundary_trail( // Find the boundary let start_offset = offset; for ch in forward { - if let Some(prev_ch) = prev_ch { - if is_boundary(prev_ch, ch) { - if start_offset == offset { - trail_offset = Some(offset); - } else { - break; - } + if let Some(prev_ch) = prev_ch + && is_boundary(prev_ch, ch) + { + if start_offset == offset { + trail_offset = Some(offset); + } else { + break; } } offset += ch.len_utf8(); diff --git a/crates/editor/src/proposed_changes_editor.rs b/crates/editor/src/proposed_changes_editor.rs index 1ead45b3de89c0705510f8afc55ecf6176a4d7a2..2d4710a8d44a023f0c3206ad0c327a34c36fdac4 100644 --- a/crates/editor/src/proposed_changes_editor.rs +++ b/crates/editor/src/proposed_changes_editor.rs @@ -241,24 +241,13 @@ impl ProposedChangesEditor { event: &BufferEvent, _cx: &mut Context, ) { - match event { - BufferEvent::Operation { .. } => { - self.recalculate_diffs_tx - .unbounded_send(RecalculateDiff { - buffer, - debounce: true, - }) - .ok(); - } - // BufferEvent::DiffBaseChanged => { - // self.recalculate_diffs_tx - // .unbounded_send(RecalculateDiff { - // buffer, - // debounce: false, - // }) - // .ok(); - // } - _ => (), + if let BufferEvent::Operation { .. } = event { + self.recalculate_diffs_tx + .unbounded_send(RecalculateDiff { + buffer, + debounce: true, + }) + .ok(); } } } @@ -442,7 +431,7 @@ impl SemanticsProvider for BranchBufferSemanticsProvider { buffer: &Entity, position: text::Anchor, cx: &mut App, - ) -> Option>> { + ) -> Option>>> { let buffer = self.to_base(buffer, &[position], cx)?; self.0.hover(&buffer, position, cx) } @@ -478,7 +467,7 @@ impl SemanticsProvider for BranchBufferSemanticsProvider { } fn supports_inlay_hints(&self, buffer: &Entity, cx: &mut App) -> bool { - if let Some(buffer) = self.to_base(&buffer, &[], cx) { + if let Some(buffer) = self.to_base(buffer, &[], cx) { self.0.supports_inlay_hints(&buffer, cx) } else { false @@ -491,7 +480,7 @@ impl SemanticsProvider for BranchBufferSemanticsProvider { position: text::Anchor, cx: &mut App, ) -> Option>>> { - let buffer = self.to_base(&buffer, &[position], cx)?; + let buffer = self.to_base(buffer, &[position], cx)?; self.0.document_highlights(&buffer, position, cx) } @@ -501,8 +490,8 @@ impl SemanticsProvider for BranchBufferSemanticsProvider { position: text::Anchor, kind: crate::GotoDefinitionKind, cx: &mut App, - ) -> Option>>> { - let buffer = self.to_base(&buffer, &[position], cx)?; + ) -> Option>>>> { + let buffer = self.to_base(buffer, &[position], cx)?; self.0.definitions(&buffer, position, kind, cx) } diff --git a/crates/editor/src/rust_analyzer_ext.rs b/crates/editor/src/rust_analyzer_ext.rs index 2b8150de67050ccced22100bfedd02be44f63907..e3d83ab1609907834083f2c6a0be6640ce110f3e 100644 --- a/crates/editor/src/rust_analyzer_ext.rs +++ b/crates/editor/src/rust_analyzer_ext.rs @@ -35,12 +35,12 @@ pub fn apply_related_actions(editor: &Entity, window: &mut Window, cx: & .filter_map(|buffer| buffer.read(cx).language()) .any(|language| is_rust_language(language)) { - register_action(&editor, window, 
go_to_parent_module); - register_action(&editor, window, expand_macro_recursively); - register_action(&editor, window, open_docs); - register_action(&editor, window, cancel_flycheck_action); - register_action(&editor, window, run_flycheck_action); - register_action(&editor, window, clear_flycheck_action); + register_action(editor, window, go_to_parent_module); + register_action(editor, window, expand_macro_recursively); + register_action(editor, window, open_docs); + register_action(editor, window, cancel_flycheck_action); + register_action(editor, window, run_flycheck_action); + register_action(editor, window, clear_flycheck_action); } } @@ -285,11 +285,11 @@ pub fn open_docs(editor: &mut Editor, _: &OpenDocs, window: &mut Window, cx: &mu workspace.update(cx, |_workspace, cx| { // Check if the local document exists, otherwise fallback to the online document. // Open with the default browser. - if let Some(local_url) = docs_urls.local { - if fs::metadata(Path::new(&local_url[8..])).is_ok() { - cx.open_url(&local_url); - return; - } + if let Some(local_url) = docs_urls.local + && fs::metadata(Path::new(&local_url[8..])).is_ok() + { + cx.open_url(&local_url); + return; } if let Some(web_url) = docs_urls.web { diff --git a/crates/editor/src/scroll.rs b/crates/editor/src/scroll.rs index 08ff23f8f70be4e512826c2793a2d95e2aee1690..82314486187db99c2ba5c104faa42828dad57cdb 100644 --- a/crates/editor/src/scroll.rs +++ b/crates/editor/src/scroll.rs @@ -675,7 +675,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - if matches!(self.mode, EditorMode::SingleLine { .. }) { + if matches!(self.mode, EditorMode::SingleLine) { cx.propagate(); return; } @@ -703,20 +703,20 @@ impl Editor { if matches!( settings.defaults.soft_wrap, SoftWrap::PreferredLineLength | SoftWrap::Bounded - ) { - if (settings.defaults.preferred_line_length as f32) < visible_column_count { - visible_column_count = settings.defaults.preferred_line_length as f32; - } + ) && (settings.defaults.preferred_line_length as f32) < visible_column_count + { + visible_column_count = settings.defaults.preferred_line_length as f32; } // If the scroll position is currently at the left edge of the document // (x == 0.0) and the intent is to scroll right, the gutter's margin // should first be added to the current position, otherwise the cursor // will end at the column position minus the margin, which looks off. - if current_position.x == 0.0 && amount.columns(visible_column_count) > 0. { - if let Some(last_position_map) = &self.last_position_map { - current_position.x += self.gutter_dimensions.margin / last_position_map.em_advance; - } + if current_position.x == 0.0 + && amount.columns(visible_column_count) > 0. 
+ && let Some(last_position_map) = &self.last_position_map + { + current_position.x += self.gutter_dimensions.margin / last_position_map.em_advance; } let new_position = current_position + point( @@ -749,12 +749,10 @@ impl Editor { if let (Some(visible_lines), Some(visible_columns)) = (self.visible_line_count(), self.visible_column_count()) + && newest_head.row() <= DisplayRow(screen_top.row().0 + visible_lines as u32) + && newest_head.column() <= screen_top.column() + visible_columns as u32 { - if newest_head.row() <= DisplayRow(screen_top.row().0 + visible_lines as u32) - && newest_head.column() <= screen_top.column() + visible_columns as u32 - { - return Ordering::Equal; - } + return Ordering::Equal; } Ordering::Greater diff --git a/crates/editor/src/scroll/actions.rs b/crates/editor/src/scroll/actions.rs index 72827b2fee48c424a632018b5f66015cd058ed79..f8104665f904e08466c72f3c410e58cb941c6b6f 100644 --- a/crates/editor/src/scroll/actions.rs +++ b/crates/editor/src/scroll/actions.rs @@ -16,7 +16,7 @@ impl Editor { return; } - if matches!(self.mode, EditorMode::SingleLine { .. }) { + if matches!(self.mode, EditorMode::SingleLine) { cx.propagate(); return; } diff --git a/crates/editor/src/scroll/autoscroll.rs b/crates/editor/src/scroll/autoscroll.rs index 88d3b52d764d15280c8ed03dd87f42b8c32d0911..057d622903ed12b4d996759cd93dc76f2ba9ee8d 100644 --- a/crates/editor/src/scroll/autoscroll.rs +++ b/crates/editor/src/scroll/autoscroll.rs @@ -116,12 +116,12 @@ impl Editor { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let mut scroll_position = self.scroll_manager.scroll_position(&display_map); let original_y = scroll_position.y; - if let Some(last_bounds) = self.expect_bounds_change.take() { - if scroll_position.y != 0. { - scroll_position.y += (bounds.top() - last_bounds.top()) / line_height; - if scroll_position.y < 0. { - scroll_position.y = 0.; - } + if let Some(last_bounds) = self.expect_bounds_change.take() + && scroll_position.y != 0. + { + scroll_position.y += (bounds.top() - last_bounds.top()) / line_height; + if scroll_position.y < 0. 
{ + scroll_position.y = 0.; } } if scroll_position.y > max_scroll_top { diff --git a/crates/editor/src/scroll/scroll_amount.rs b/crates/editor/src/scroll/scroll_amount.rs index b2af4f8e4fbce899c6aee317402ee1365cee8600..5992c9023c1f9d6eb7e7eb201099c6eef17a33d8 100644 --- a/crates/editor/src/scroll/scroll_amount.rs +++ b/crates/editor/src/scroll/scroll_amount.rs @@ -67,10 +67,7 @@ impl ScrollAmount { } pub fn is_full_page(&self) -> bool { - match self { - ScrollAmount::Page(count) if count.abs() == 1.0 => true, - _ => false, - } + matches!(self, ScrollAmount::Page(count) if count.abs() == 1.0) } pub fn direction(&self) -> ScrollDirection { diff --git a/crates/editor/src/selections_collection.rs b/crates/editor/src/selections_collection.rs index 73c5f1c076e510b2aeb7d648b7ce066b65f9094c..0a02390b641e1020aff8d9cf0167b44485baf489 100644 --- a/crates/editor/src/selections_collection.rs +++ b/crates/editor/src/selections_collection.rs @@ -119,8 +119,8 @@ impl SelectionsCollection { cx: &mut App, ) -> Option> { let map = self.display_map(cx); - let selection = resolve_selections(self.pending_anchor().as_ref(), &map).next(); - selection + + resolve_selections(self.pending_anchor().as_ref(), &map).next() } pub(crate) fn pending_mode(&self) -> Option { @@ -276,18 +276,18 @@ impl SelectionsCollection { cx: &mut App, ) -> Selection { let map = self.display_map(cx); - let selection = resolve_selections([self.newest_anchor()], &map) + + resolve_selections([self.newest_anchor()], &map) .next() - .unwrap(); - selection + .unwrap() } pub fn newest_display(&self, cx: &mut App) -> Selection { let map = self.display_map(cx); - let selection = resolve_selections_display([self.newest_anchor()], &map) + + resolve_selections_display([self.newest_anchor()], &map) .next() - .unwrap(); - selection + .unwrap() } pub fn oldest_anchor(&self) -> &Selection { @@ -303,10 +303,10 @@ impl SelectionsCollection { cx: &mut App, ) -> Selection { let map = self.display_map(cx); - let selection = resolve_selections([self.oldest_anchor()], &map) + + resolve_selections([self.oldest_anchor()], &map) .next() - .unwrap(); - selection + .unwrap() } pub fn first_anchor(&self) -> Selection { diff --git a/crates/editor/src/signature_help.rs b/crates/editor/src/signature_help.rs index e9f8d2dbd33f71e224ae1c868dab80a7c4bb467a..cb21f35d7ed7556cf09f9e566286a10f8317ca6c 100644 --- a/crates/editor/src/signature_help.rs +++ b/crates/editor/src/signature_help.rs @@ -169,7 +169,7 @@ impl Editor { else { return; }; - let Some(lsp_store) = self.project.as_ref().map(|p| p.read(cx).lsp_store()) else { + let Some(lsp_store) = self.project().map(|p| p.read(cx).lsp_store()) else { return; }; let task = lsp_store.update(cx, |lsp_store, cx| { @@ -182,7 +182,9 @@ impl Editor { let signature_help = task.await; editor .update(cx, |editor, cx| { - let Some(mut signature_help) = signature_help.into_iter().next() else { + let Some(mut signature_help) = + signature_help.unwrap_or_default().into_iter().next() + else { editor .signature_help_state .hide(SignatureHelpHiddenBy::AutoClose); @@ -196,7 +198,7 @@ impl Editor { .highlight_text(&text, 0..signature.label.len()) .into_iter() .flat_map(|(range, highlight_id)| { - Some((range, highlight_id.style(&cx.theme().syntax())?)) + Some((range, highlight_id.style(cx.theme().syntax())?)) }); signature.highlights = combine_highlights(signature.highlights.clone(), highlights) diff --git a/crates/editor/src/tasks.rs b/crates/editor/src/tasks.rs index 
0d497e4cac779a65b7a6593d3b82f786d10321ce..8be2a3a2e14d7b815d2ca3496adc6f70ec16055e 100644 --- a/crates/editor/src/tasks.rs +++ b/crates/editor/src/tasks.rs @@ -89,7 +89,7 @@ impl Editor { .lsp_task_source()?; if lsp_settings .get(&lsp_tasks_source) - .map_or(true, |s| s.enable_lsp_tasks) + .is_none_or(|s| s.enable_lsp_tasks) { let buffer_id = buffer.read(cx).remote_id(); Some((lsp_tasks_source, buffer_id)) diff --git a/crates/editor/src/test.rs b/crates/editor/src/test.rs index 0a9d5e9535d2b2d29e33ee49a8afa46a387d773e..960fecf59a8ae80d168f5ab82c74f32dfa7d4745 100644 --- a/crates/editor/src/test.rs +++ b/crates/editor/src/test.rs @@ -53,7 +53,7 @@ pub fn marked_display_snapshot( let (unmarked_text, markers) = marked_text_offsets(text); let font = Font { - family: "Zed Plex Mono".into(), + family: ".ZedMono".into(), features: FontFeatures::default(), fallbacks: None, weight: FontWeight::default(), @@ -184,12 +184,12 @@ pub fn editor_content_with_blocks(editor: &Entity, cx: &mut VisualTestCo for (row, block) in blocks { match block { Block::Custom(custom_block) => { - if let BlockPlacement::Near(x) = &custom_block.placement { - if snapshot.intersects_fold(x.to_point(&snapshot.buffer_snapshot)) { - continue; - } + if let BlockPlacement::Near(x) = &custom_block.placement + && snapshot.intersects_fold(x.to_point(&snapshot.buffer_snapshot)) + { + continue; }; - let content = block_content_for_tests(&editor, custom_block.id, cx) + let content = block_content_for_tests(editor, custom_block.id, cx) .expect("block content not found"); // 2: "related info 1 for diagnostic 0" if let Some(height) = custom_block.height { @@ -230,26 +230,23 @@ pub fn editor_content_with_blocks(editor: &Entity, cx: &mut VisualTestCo lines[row as usize].push_str("§ -----"); } } - Block::ExcerptBoundary { - excerpt, - height, - starts_new_buffer, - } => { - if starts_new_buffer { - lines[row.0 as usize].push_str(&cx.update(|_, cx| { - format!( - "§ {}", - excerpt - .buffer - .file() - .unwrap() - .file_name(cx) - .to_string_lossy() - ) - })); - } else { - lines[row.0 as usize].push_str("§ -----") + Block::ExcerptBoundary { height, .. 
} => { + for row in row.0..row.0 + height { + lines[row as usize].push_str("§ -----"); } + } + Block::BufferHeader { excerpt, height } => { + lines[row.0 as usize].push_str(&cx.update(|_, cx| { + format!( + "§ {}", + excerpt + .buffer + .file() + .unwrap() + .file_name(cx) + .to_string_lossy() + ) + })); for row in row.0 + 1..row.0 + height { lines[row as usize].push_str("§ -----"); } diff --git a/crates/editor/src/test/editor_lsp_test_context.rs b/crates/editor/src/test/editor_lsp_test_context.rs index c59786b1eb387835a21e2c155efaf6acefd4ff4a..3f78fa2f3e9bcd592ba5d2a9f29c42967a27c126 100644 --- a/crates/editor/src/test/editor_lsp_test_context.rs +++ b/crates/editor/src/test/editor_lsp_test_context.rs @@ -300,6 +300,7 @@ impl EditorLspTestContext { self.to_lsp_range(ranges[0].clone()) } + #[expect(clippy::wrong_self_convention, reason = "This is test code")] pub fn to_lsp_range(&mut self, range: Range) -> lsp::Range { let snapshot = self.update_editor(|editor, window, cx| editor.snapshot(window, cx)); let start_point = range.start.to_point(&snapshot.buffer_snapshot); @@ -326,6 +327,7 @@ impl EditorLspTestContext { }) } + #[expect(clippy::wrong_self_convention, reason = "This is test code")] pub fn to_lsp(&mut self, offset: usize) -> lsp::Position { let snapshot = self.update_editor(|editor, window, cx| editor.snapshot(window, cx)); let point = offset.to_point(&snapshot.buffer_snapshot); diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index bdf73da5fbfd5d4c29826859790493fbb8494239..8c54c265edf7a19af9d17e982a5f4cb6a0079cc3 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -119,13 +119,7 @@ impl EditorTestContext { for excerpt in excerpts.into_iter() { let (text, ranges) = marked_text_ranges(excerpt, false); let buffer = cx.new(|cx| Buffer::local(text, cx)); - multibuffer.push_excerpts( - buffer, - ranges - .into_iter() - .map(|range| ExcerptRange::new(range.clone())), - cx, - ); + multibuffer.push_excerpts(buffer, ranges.into_iter().map(ExcerptRange::new), cx); } multibuffer }); @@ -297,9 +291,8 @@ impl EditorTestContext { pub fn set_head_text(&mut self, diff_base: &str) { self.cx.run_until_parked(); - let fs = self.update_editor(|editor, _, cx| { - editor.project.as_ref().unwrap().read(cx).fs().as_fake() - }); + let fs = + self.update_editor(|editor, _, cx| editor.project().unwrap().read(cx).fs().as_fake()); let path = self.update_buffer(|buffer, _| buffer.file().unwrap().path().clone()); fs.set_head_for_repo( &Self::root_path().join(".git"), @@ -311,18 +304,16 @@ impl EditorTestContext { pub fn clear_index_text(&mut self) { self.cx.run_until_parked(); - let fs = self.update_editor(|editor, _, cx| { - editor.project.as_ref().unwrap().read(cx).fs().as_fake() - }); + let fs = + self.update_editor(|editor, _, cx| editor.project().unwrap().read(cx).fs().as_fake()); fs.set_index_for_repo(&Self::root_path().join(".git"), &[]); self.cx.run_until_parked(); } pub fn set_index_text(&mut self, diff_base: &str) { self.cx.run_until_parked(); - let fs = self.update_editor(|editor, _, cx| { - editor.project.as_ref().unwrap().read(cx).fs().as_fake() - }); + let fs = + self.update_editor(|editor, _, cx| editor.project().unwrap().read(cx).fs().as_fake()); let path = self.update_buffer(|buffer, _| buffer.file().unwrap().path().clone()); fs.set_index_for_repo( &Self::root_path().join(".git"), @@ -333,9 +324,8 @@ impl EditorTestContext { #[track_caller] pub fn assert_index_text(&mut self, 
expected: Option<&str>) { - let fs = self.update_editor(|editor, _, cx| { - editor.project.as_ref().unwrap().read(cx).fs().as_fake() - }); + let fs = + self.update_editor(|editor, _, cx| editor.project().unwrap().read(cx).fs().as_fake()); let path = self.update_buffer(|buffer, _| buffer.file().unwrap().path().clone()); let mut found = None; fs.with_git_state(&Self::root_path().join(".git"), false, |git_state| { @@ -430,7 +420,7 @@ impl EditorTestContext { if expected_text == "[FOLDED]\n" { assert!(is_folded, "excerpt {} should be folded", ix); let is_selected = selections.iter().any(|s| s.head().excerpt_id == excerpt_id); - if expected_selections.len() > 0 { + if !expected_selections.is_empty() { assert!( is_selected, "excerpt {ix} should be selected. got {:?}", diff --git a/crates/eval/build.rs b/crates/eval/build.rs new file mode 100644 index 0000000000000000000000000000000000000000..9ab40da0fb0ca880cecc3a87d5a9e95172dcb6ec --- /dev/null +++ b/crates/eval/build.rs @@ -0,0 +1,14 @@ +fn main() { + let cargo_toml = + std::fs::read_to_string("../zed/Cargo.toml").expect("Failed to read crates/zed/Cargo.toml"); + let version = cargo_toml + .lines() + .find(|line| line.starts_with("version = ")) + .expect("Version not found in crates/zed/Cargo.toml") + .split('=') + .nth(1) + .expect("Invalid version format") + .trim() + .trim_matches('"'); + println!("cargo:rustc-env=ZED_PKG_VERSION={}", version); +} diff --git a/crates/eval/src/assertions.rs b/crates/eval/src/assertions.rs index 489e4aa22ecdc6633a0002238a2287ca0a5105f0..01fac186d33a8b5b156121acf924d37c90c64679 100644 --- a/crates/eval/src/assertions.rs +++ b/crates/eval/src/assertions.rs @@ -54,7 +54,7 @@ impl AssertionsReport { pub fn passed_count(&self) -> usize { self.ran .iter() - .filter(|a| a.result.as_ref().map_or(false, |result| result.passed)) + .filter(|a| a.result.as_ref().is_ok_and(|result| result.passed)) .count() } diff --git a/crates/eval/src/eval.rs b/crates/eval/src/eval.rs index d638ac171feafd8be72925cc26beff6726a3ab8d..9e0504abca479483b4e5f49c41eec1f6ba3834f1 100644 --- a/crates/eval/src/eval.rs +++ b/crates/eval/src/eval.rs @@ -103,7 +103,7 @@ fn main() { let languages: HashSet = args.languages.into_iter().collect(); let http_client = Arc::new(ReqwestClient::new()); - let app = Application::headless().with_http_client(http_client.clone()); + let app = Application::headless().with_http_client(http_client); let all_threads = examples::all(&examples_dir); app.run(move |cx| { @@ -112,7 +112,7 @@ fn main() { let telemetry = app_state.client.telemetry(); telemetry.start(system_id, installation_id, session_id, cx); - let enable_telemetry = env::var("ZED_EVAL_TELEMETRY").map_or(false, |value| value == "1") + let enable_telemetry = env::var("ZED_EVAL_TELEMETRY").is_ok_and(|value| value == "1") && telemetry.has_checksum_seed(); if enable_telemetry { println!("Telemetry enabled"); @@ -167,15 +167,14 @@ fn main() { continue; } - if let Some(language) = meta.language_server { - if !languages.contains(&language.file_extension) { + if let Some(language) = meta.language_server + && !languages.contains(&language.file_extension) { panic!( "Eval for {:?} could not be run because no language server was found for extension {:?}", meta.name, language.file_extension ); } - } // TODO: This creates a worktree per repetition. 
Ideally these examples should // either be run sequentially on the same worktree, or reuse worktrees when there @@ -337,7 +336,7 @@ pub struct AgentAppState { } pub fn init(cx: &mut App) -> Arc { - let app_version = AppVersion::global(cx); + let app_version = AppVersion::load(env!("ZED_PKG_VERSION")); release_channel::init(app_version, cx); gpui_tokio::init(cx); @@ -350,7 +349,7 @@ pub fn init(cx: &mut App) -> Arc { // Set User-Agent so we can download language servers from GitHub let user_agent = format!( - "Zed/{} ({}; {})", + "Zed Agent Eval/{} ({}; {})", app_version, std::env::consts::OS, std::env::consts::ARCH @@ -417,11 +416,7 @@ pub fn init(cx: &mut App) -> Arc { language::init(cx); debug_adapter_extension::init(extension_host_proxy.clone(), cx); - language_extension::init( - LspAccess::Noop, - extension_host_proxy.clone(), - languages.clone(), - ); + language_extension::init(LspAccess::Noop, extension_host_proxy, languages.clone()); language_model::init(client.clone(), cx); language_models::init(user_store.clone(), client.clone(), cx); languages::init(languages.clone(), node_runtime.clone(), cx); @@ -520,7 +515,7 @@ async fn judge_example( enable_telemetry: bool, cx: &AsyncApp, ) -> JudgeOutput { - let judge_output = example.judge(model.clone(), &run_output, cx).await; + let judge_output = example.judge(model.clone(), run_output, cx).await; if enable_telemetry { telemetry::event!( @@ -531,7 +526,7 @@ async fn judge_example( example_name = example.name.clone(), example_repetition = example.repetition, diff_evaluation = judge_output.diff.clone(), - thread_evaluation = judge_output.thread.clone(), + thread_evaluation = judge_output.thread, tool_metrics = run_output.tool_metrics, response_count = run_output.response_count, token_usage = run_output.token_usage, @@ -711,7 +706,7 @@ fn print_report( println!("Average thread score: {average_thread_score}%"); } - println!(""); + println!(); print_h2("CUMULATIVE TOOL METRICS"); println!("{}", cumulative_tool_metrics); diff --git a/crates/eval/src/example.rs b/crates/eval/src/example.rs index 23c8814916da2df4016c4196d7767b748da54280..457b62e98ca4cabf83fb379cbaa70f07957ac6b7 100644 --- a/crates/eval/src/example.rs +++ b/crates/eval/src/example.rs @@ -64,7 +64,7 @@ impl ExampleMetadata { self.url .split('/') .next_back() - .unwrap_or(&"") + .unwrap_or("") .trim_end_matches(".git") .into() } @@ -255,7 +255,7 @@ impl ExampleContext { thread.update(cx, |thread, _cx| { if let Some(tool_use) = pending_tool_use { let mut tool_metrics = tool_metrics.lock().unwrap(); - if let Some(tool_result) = thread.tool_result(&tool_use_id) { + if let Some(tool_result) = thread.tool_result(tool_use_id) { let message = if tool_result.is_error { format!("✖︎ {}", tool_use.name) } else { @@ -335,7 +335,7 @@ impl ExampleContext { for message in thread.messages().skip(message_count_before) { messages.push(Message { _role: message.role, - text: message.to_string(), + text: message.to_message_content(), tool_use: thread .tool_uses_for_message(message.id, cx) .into_iter() diff --git a/crates/eval/src/examples/add_arg_to_trait_method.rs b/crates/eval/src/examples/add_arg_to_trait_method.rs index 9c538f926059eb3998eb725168905d148dccdc9d..084f12bc6263da030d313c362cc3d051dfdb8ea8 100644 --- a/crates/eval/src/examples/add_arg_to_trait_method.rs +++ b/crates/eval/src/examples/add_arg_to_trait_method.rs @@ -70,10 +70,10 @@ impl Example for AddArgToTraitMethod { let path_str = format!("crates/assistant_tools/src/{}.rs", tool_name); let edits = edits.get(Path::new(&path_str)); - let 
ignored = edits.map_or(false, |edits| { + let ignored = edits.is_some_and(|edits| { edits.has_added_line(" _window: Option,\n") }); - let uningored = edits.map_or(false, |edits| { + let uningored = edits.is_some_and(|edits| { edits.has_added_line(" window: Option,\n") }); @@ -89,7 +89,7 @@ impl Example for AddArgToTraitMethod { let batch_tool_edits = edits.get(Path::new("crates/assistant_tools/src/batch_tool.rs")); cx.assert( - batch_tool_edits.map_or(false, |edits| { + batch_tool_edits.is_some_and(|edits| { edits.has_added_line(" window: Option,\n") }), "Argument: batch_tool", diff --git a/crates/eval/src/explorer.rs b/crates/eval/src/explorer.rs index ee1dfa95c3840af42bdd134be1110bd2483c97aa..3326070cea4e860210f8ba7e0038fec2f3404c30 100644 --- a/crates/eval/src/explorer.rs +++ b/crates/eval/src/explorer.rs @@ -46,27 +46,25 @@ fn find_target_files_recursive( max_depth, found_files, )?; - } else if path.is_file() { - if let Some(filename_osstr) = path.file_name() { - if let Some(filename_str) = filename_osstr.to_str() { - if filename_str == target_filename { - found_files.push(path); - } - } - } + } else if path.is_file() + && let Some(filename_osstr) = path.file_name() + && let Some(filename_str) = filename_osstr.to_str() + && filename_str == target_filename + { + found_files.push(path); } } Ok(()) } pub fn generate_explorer_html(input_paths: &[PathBuf], output_path: &PathBuf) -> Result { - if let Some(parent) = output_path.parent() { - if !parent.exists() { - fs::create_dir_all(parent).context(format!( - "Failed to create output directory: {}", - parent.display() - ))?; - } + if let Some(parent) = output_path.parent() + && !parent.exists() + { + fs::create_dir_all(parent).context(format!( + "Failed to create output directory: {}", + parent.display() + ))?; } let template_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("src/explorer.html"); diff --git a/crates/eval/src/instance.rs b/crates/eval/src/instance.rs index 0f2b4c18eade06060f9002615b6b995d9bfdde0d..c6e4e0b6ec683b63b90920861f3cd023069666e6 100644 --- a/crates/eval/src/instance.rs +++ b/crates/eval/src/instance.rs @@ -90,11 +90,8 @@ impl ExampleInstance { worktrees_dir: &Path, repetition: usize, ) -> Self { - let name = thread.meta().name.to_string(); - let run_directory = run_dir - .join(&name) - .join(repetition.to_string()) - .to_path_buf(); + let name = thread.meta().name; + let run_directory = run_dir.join(&name).join(repetition.to_string()); let repo_path = repo_path_for_url(repos_dir, &thread.meta().url); @@ -376,11 +373,10 @@ impl ExampleInstance { ); let result = this.thread.conversation(&mut example_cx).await; - if let Err(err) = result { - if !err.is::() { + if let Err(err) = result + && !err.is::() { return Err(err); } - } println!("{}Stopped", this.log_prefix); @@ -459,8 +455,8 @@ impl ExampleInstance { let mut output_file = File::create(self.run_directory.join("judge.md")).expect("failed to create judge.md"); - let diff_task = self.judge_diff(model.clone(), &run_output, cx); - let thread_task = self.judge_thread(model.clone(), &run_output, cx); + let diff_task = self.judge_diff(model.clone(), run_output, cx); + let thread_task = self.judge_thread(model.clone(), run_output, cx); let (diff_result, thread_result) = futures::join!(diff_task, thread_task); @@ -661,7 +657,7 @@ pub fn wait_for_lang_server( .update(cx, |buffer, cx| { lsp_store.update(cx, |lsp_store, cx| { lsp_store - .language_servers_for_local_buffer(&buffer, cx) + .language_servers_for_local_buffer(buffer, cx) .next() .is_some() }) @@ -679,8 +675,8 @@ 
pub fn wait_for_lang_server( [ cx.subscribe(&lsp_store, { let log_prefix = log_prefix.clone(); - move |_, event, _| match event { - project::LspStoreEvent::LanguageServerUpdate { + move |_, event, _| { + if let project::LspStoreEvent::LanguageServerUpdate { message: client::proto::update_language_server::Variant::WorkProgress( LspWorkProgress { @@ -689,11 +685,13 @@ pub fn wait_for_lang_server( }, ), .. - } => println!("{}⟲ {message}", log_prefix), - _ => {} + } = event + { + println!("{}⟲ {message}", log_prefix) + } } }), - cx.subscribe(&project, { + cx.subscribe(project, { let buffer = buffer.clone(); move |project, event, cx| match event { project::Event::LanguageServerAdded(_, _, _) => { @@ -771,7 +769,7 @@ pub async fn query_lsp_diagnostics( } fn parse_assertion_result(response: &str) -> Result { - let analysis = get_tag("analysis", response)?.to_string(); + let analysis = get_tag("analysis", response)?; let passed = match get_tag("passed", response)?.to_lowercase().as_str() { "true" => true, "false" => false, @@ -838,7 +836,7 @@ fn messages_to_markdown<'a>(message_iter: impl IntoIterator) for segment in &message.segments { match segment { MessageSegment::Text(text) => { - messages.push_str(&text); + messages.push_str(text); messages.push_str("\n\n"); } MessageSegment::Thinking { text, signature } => { @@ -846,7 +844,7 @@ fn messages_to_markdown<'a>(message_iter: impl IntoIterator) if let Some(sig) = signature { messages.push_str(&format!("Signature: {}\n\n", sig)); } - messages.push_str(&text); + messages.push_str(text); messages.push_str("\n"); } MessageSegment::RedactedThinking(items) => { @@ -878,7 +876,7 @@ pub async fn send_language_model_request( request: LanguageModelRequest, cx: &AsyncApp, ) -> anyhow::Result { - match model.stream_completion_text(request, &cx).await { + match model.stream_completion_text(request, cx).await { Ok(mut stream) => { let mut full_response = String::new(); while let Some(chunk_result) = stream.stream.next().await { @@ -915,9 +913,9 @@ impl RequestMarkdown { for tool in &request.tools { write!(&mut tools, "# {}\n\n", tool.name).unwrap(); write!(&mut tools, "{}\n\n", tool.description).unwrap(); - write!( + writeln!( &mut tools, - "{}\n", + "{}", MarkdownCodeBlock { tag: "json", text: &format!("{:#}", tool.input_schema) @@ -1191,7 +1189,7 @@ mod test { output.analysis, Some("The model did a good job but there were still compilations errors.".into()) ); - assert_eq!(output.passed, true); + assert!(output.passed); let response = r#" Text around ignored @@ -1211,6 +1209,6 @@ mod test { output.analysis, Some("Failed to compile:\n- Error 1\n- Error 2".into()) ); - assert_eq!(output.passed, false); + assert!(!output.passed); } } diff --git a/crates/extension/src/extension.rs b/crates/extension/src/extension.rs index 35f7f419383cb9f3c6cc518663ad818735eab80e..6af793253bce2d122a5361f6b83f33cb39d45253 100644 --- a/crates/extension/src/extension.rs +++ b/crates/extension/src/extension.rs @@ -178,16 +178,15 @@ pub fn parse_wasm_extension_version( for part in wasmparser::Parser::new(0).parse_all(wasm_bytes) { if let wasmparser::Payload::CustomSection(s) = part.context("error parsing wasm extension")? 
+ && s.name() == "zed:api-version" { - if s.name() == "zed:api-version" { - version = parse_wasm_extension_version_custom_section(s.data()); - if version.is_none() { - bail!( - "extension {} has invalid zed:api-version section: {:?}", - extension_id, - s.data() - ); - } + version = parse_wasm_extension_version_custom_section(s.data()); + if version.is_none() { + bail!( + "extension {} has invalid zed:api-version section: {:?}", + extension_id, + s.data() + ); } } } diff --git a/crates/extension/src/extension_builder.rs b/crates/extension/src/extension_builder.rs index 621ba9250c12f8edd4ab49bbdef13bc976a239dd..3a3026f19c1961a6f4ac4c7fe5ac217ef6855cea 100644 --- a/crates/extension/src/extension_builder.rs +++ b/crates/extension/src/extension_builder.rs @@ -401,7 +401,7 @@ impl ExtensionBuilder { let mut clang_path = wasi_sdk_dir.clone(); clang_path.extend(["bin", &format!("clang{}", env::consts::EXE_SUFFIX)]); - if fs::metadata(&clang_path).map_or(false, |metadata| metadata.is_file()) { + if fs::metadata(&clang_path).is_ok_and(|metadata| metadata.is_file()) { return Ok(clang_path); } @@ -452,7 +452,7 @@ impl ExtensionBuilder { let mut output = Vec::new(); let mut stack = Vec::new(); - for payload in Parser::new(0).parse_all(&input) { + for payload in Parser::new(0).parse_all(input) { let payload = payload?; // Track nesting depth, so that we don't mess with inner producer sections: @@ -484,14 +484,10 @@ impl ExtensionBuilder { _ => {} } - match &payload { - CustomSection(c) => { - if strip_custom_section(c.name()) { - continue; - } - } - - _ => {} + if let CustomSection(c) = &payload + && strip_custom_section(c.name()) + { + continue; } if let Some((id, range)) = payload.as_section() { RawSection { diff --git a/crates/extension/src/extension_events.rs b/crates/extension/src/extension_events.rs index b151b3f412ea523a1c5b97dea210adf68e5bea89..94f3277b05b76aa93717458c57d0280a15b8435f 100644 --- a/crates/extension/src/extension_events.rs +++ b/crates/extension/src/extension_events.rs @@ -19,9 +19,8 @@ pub struct ExtensionEvents; impl ExtensionEvents { /// Returns the global [`ExtensionEvents`]. 
pub fn try_global(cx: &App) -> Option> { - return cx - .try_global::() - .map(|g| g.0.clone()); + cx.try_global::() + .map(|g| g.0.clone()) } fn new(_cx: &mut Context) -> Self { diff --git a/crates/extension/src/extension_host_proxy.rs b/crates/extension/src/extension_host_proxy.rs index 917739759f2ab0dcbfe012b1d774a8c9f11ca96b..6a24e3ba3f496bd0f0b89d61e9125b29ecae0204 100644 --- a/crates/extension/src/extension_host_proxy.rs +++ b/crates/extension/src/extension_host_proxy.rs @@ -28,7 +28,6 @@ pub struct ExtensionHostProxy { snippet_proxy: RwLock>>, slash_command_proxy: RwLock>>, context_server_proxy: RwLock>>, - indexed_docs_provider_proxy: RwLock>>, debug_adapter_provider_proxy: RwLock>>, } @@ -54,7 +53,6 @@ impl ExtensionHostProxy { snippet_proxy: RwLock::default(), slash_command_proxy: RwLock::default(), context_server_proxy: RwLock::default(), - indexed_docs_provider_proxy: RwLock::default(), debug_adapter_provider_proxy: RwLock::default(), } } @@ -87,14 +85,6 @@ impl ExtensionHostProxy { self.context_server_proxy.write().replace(Arc::new(proxy)); } - pub fn register_indexed_docs_provider_proxy( - &self, - proxy: impl ExtensionIndexedDocsProviderProxy, - ) { - self.indexed_docs_provider_proxy - .write() - .replace(Arc::new(proxy)); - } pub fn register_debug_adapter_proxy(&self, proxy: impl ExtensionDebugAdapterProviderProxy) { self.debug_adapter_provider_proxy .write() @@ -408,30 +398,6 @@ impl ExtensionContextServerProxy for ExtensionHostProxy { } } -pub trait ExtensionIndexedDocsProviderProxy: Send + Sync + 'static { - fn register_indexed_docs_provider(&self, extension: Arc, provider_id: Arc); - - fn unregister_indexed_docs_provider(&self, provider_id: Arc); -} - -impl ExtensionIndexedDocsProviderProxy for ExtensionHostProxy { - fn register_indexed_docs_provider(&self, extension: Arc, provider_id: Arc) { - let Some(proxy) = self.indexed_docs_provider_proxy.read().clone() else { - return; - }; - - proxy.register_indexed_docs_provider(extension, provider_id) - } - - fn unregister_indexed_docs_provider(&self, provider_id: Arc) { - let Some(proxy) = self.indexed_docs_provider_proxy.read().clone() else { - return; - }; - - proxy.unregister_indexed_docs_provider(provider_id) - } -} - pub trait ExtensionDebugAdapterProviderProxy: Send + Sync + 'static { fn register_debug_adapter( &self, diff --git a/crates/extension/src/extension_manifest.rs b/crates/extension/src/extension_manifest.rs index 5852b3e3fc32601e8d9527e02d593e02cd66f3c6..f5296198b06ffeeb83dd21be35d27be6b4387294 100644 --- a/crates/extension/src/extension_manifest.rs +++ b/crates/extension/src/extension_manifest.rs @@ -84,8 +84,6 @@ pub struct ExtensionManifest { #[serde(default)] pub slash_commands: BTreeMap, SlashCommandManifestEntry>, #[serde(default)] - pub indexed_docs_providers: BTreeMap, IndexedDocsProviderEntry>, - #[serde(default)] pub snippets: Option, #[serde(default)] pub capabilities: Vec, @@ -195,9 +193,6 @@ pub struct SlashCommandManifestEntry { pub requires_argument: bool, } -#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)] -pub struct IndexedDocsProviderEntry {} - #[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)] pub struct DebugAdapterManifestEntry { pub schema_path: Option, @@ -271,7 +266,6 @@ fn manifest_from_old_manifest( language_servers: Default::default(), context_servers: BTreeMap::default(), slash_commands: BTreeMap::default(), - indexed_docs_providers: BTreeMap::default(), snippets: None, capabilities: Vec::new(), debug_adapters: Default::default(), @@ -304,7 +298,6 @@ mod 
tests { language_servers: BTreeMap::default(), context_servers: BTreeMap::default(), slash_commands: BTreeMap::default(), - indexed_docs_providers: BTreeMap::default(), snippets: None, capabilities: vec![], debug_adapters: Default::default(), diff --git a/crates/extension_api/src/extension_api.rs b/crates/extension_api/src/extension_api.rs index aacc5d8795202e8d84c043a881933eabefae36bd..72327179ee08550994854d8b95a190ac94d84cea 100644 --- a/crates/extension_api/src/extension_api.rs +++ b/crates/extension_api/src/extension_api.rs @@ -232,10 +232,10 @@ pub trait Extension: Send + Sync { /// /// To work through a real-world example, take a `cargo run` task and a hypothetical `cargo` locator: /// 1. We may need to modify the task; in this case, it is problematic that `cargo run` spawns a binary. We should turn `cargo run` into a debug scenario with - /// `cargo build` task. This is the decision we make at `dap_locator_create_scenario` scope. + /// `cargo build` task. This is the decision we make at `dap_locator_create_scenario` scope. /// 2. Then, after the build task finishes, we will run `run_dap_locator` of the locator that produced the build task to find the program to be debugged. This function - /// should give us a debugger-agnostic configuration for launching a debug target (that we end up resolving with [`Extension::dap_config_to_scenario`]). It's almost as if the user - /// found the artifact path by themselves. + /// should give us a debugger-agnostic configuration for launching a debug target (that we end up resolving with [`Extension::dap_config_to_scenario`]). It's almost as if the user + /// found the artifact path by themselves. /// /// Note that you're not obliged to use build tasks with locators. Specifically, it is sufficient to provide a debug configuration directly in the return value of /// `dap_locator_create_scenario` if you're able to do that. Make sure to not fill out `build` field in that case, as that will prevent Zed from running second phase of resolution in such case. 
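A pattern worth calling out: most hunks in this patch (including the `parse_wasm_extension_version` change just above) collapse nested `if let` blocks into let-chains, so the binding and the extra guard live in a single `if`. A minimal sketch of the before/after shape, assuming a toolchain where let-chains are stable (Rust 2024 edition); the function and data below are illustrative, not taken from the patch:

// Before: two nested conditionals.
fn api_version_nested(sections: &[(String, Option<u32>)]) -> Option<u32> {
    for (name, version) in sections {
        if name.as_str() == "zed:api-version" {
            if let Some(v) = version {
                return Some(*v);
            }
        }
    }
    None
}

// After: one let-chain, matching the shape used throughout this patch.
fn api_version_chained(sections: &[(String, Option<u32>)]) -> Option<u32> {
    for (name, version) in sections {
        if name.as_str() == "zed:api-version"
            && let Some(v) = version
        {
            return Some(*v);
        }
    }
    None
}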
diff --git a/crates/extension_cli/src/main.rs b/crates/extension_cli/src/main.rs index ab4a9cddb0fa13421677772d1c07c1a8d9234d76..d6c0501efdacff2a9eaf542695ed44325908ea56 100644 --- a/crates/extension_cli/src/main.rs +++ b/crates/extension_cli/src/main.rs @@ -144,10 +144,6 @@ fn extension_provides(manifest: &ExtensionManifest) -> BTreeSet ExtensionManifest { .collect(), context_servers: BTreeMap::default(), slash_commands: BTreeMap::default(), - indexed_docs_providers: BTreeMap::default(), snippets: None, capabilities: vec![ExtensionCapability::ProcessExec( extension::ProcessExecCapability { diff --git a/crates/extension_host/src/capability_granter.rs b/crates/extension_host/src/capability_granter.rs index c77e5ecba15b5e10caa331d3b6ee3976b899ed21..5491967e080fc4d12a52f0360dab1896b77e19d3 100644 --- a/crates/extension_host/src/capability_granter.rs +++ b/crates/extension_host/src/capability_granter.rs @@ -108,7 +108,6 @@ mod tests { language_servers: BTreeMap::default(), context_servers: BTreeMap::default(), slash_commands: BTreeMap::default(), - indexed_docs_providers: BTreeMap::default(), snippets: None, capabilities: vec![], debug_adapters: Default::default(), @@ -146,7 +145,7 @@ mod tests { command: "*".to_string(), args: vec!["**".to_string()], })], - manifest.clone(), + manifest, ); assert!(granter.grant_exec("ls", &["-la"]).is_ok()); } diff --git a/crates/extension_host/src/extension_host.rs b/crates/extension_host/src/extension_host.rs index dc38c244f1f94b6aeb266d52c7933394412ec269..fde0aeac9405d114f9cee89ca054d4503a35d482 100644 --- a/crates/extension_host/src/extension_host.rs +++ b/crates/extension_host/src/extension_host.rs @@ -16,9 +16,9 @@ pub use extension::ExtensionManifest; use extension::extension_builder::{CompileExtensionOptions, ExtensionBuilder}; use extension::{ ExtensionContextServerProxy, ExtensionDebugAdapterProviderProxy, ExtensionEvents, - ExtensionGrammarProxy, ExtensionHostProxy, ExtensionIndexedDocsProviderProxy, - ExtensionLanguageProxy, ExtensionLanguageServerProxy, ExtensionSlashCommandProxy, - ExtensionSnippetProxy, ExtensionThemeProxy, + ExtensionGrammarProxy, ExtensionHostProxy, ExtensionLanguageProxy, + ExtensionLanguageServerProxy, ExtensionSlashCommandProxy, ExtensionSnippetProxy, + ExtensionThemeProxy, }; use fs::{Fs, RemoveOptions}; use futures::future::join_all; @@ -93,10 +93,9 @@ pub fn is_version_compatible( .wasm_api_version .as_ref() .and_then(|wasm_api_version| SemanticVersion::from_str(wasm_api_version).ok()) + && !is_supported_wasm_api_version(release_channel, wasm_api_version) { - if !is_supported_wasm_api_version(release_channel, wasm_api_version) { - return false; - } + return false; } true @@ -292,19 +291,17 @@ impl ExtensionStore { // it must be asynchronously rebuilt. 
let mut extension_index = ExtensionIndex::default(); let mut extension_index_needs_rebuild = true; - if let Ok(index_content) = index_content { - if let Some(index) = serde_json::from_str(&index_content).log_err() { - extension_index = index; - if let (Ok(Some(index_metadata)), Ok(Some(extensions_metadata))) = - (index_metadata, extensions_metadata) - { - if index_metadata - .mtime - .bad_is_greater_than(extensions_metadata.mtime) - { - extension_index_needs_rebuild = false; - } - } + if let Ok(index_content) = index_content + && let Some(index) = serde_json::from_str(&index_content).log_err() + { + extension_index = index; + if let (Ok(Some(index_metadata)), Ok(Some(extensions_metadata))) = + (index_metadata, extensions_metadata) + && index_metadata + .mtime + .bad_is_greater_than(extensions_metadata.mtime) + { + extension_index_needs_rebuild = false; } } @@ -392,10 +389,9 @@ impl ExtensionStore { if let Some(path::Component::Normal(extension_dir_name)) = event_path.components().next() + && let Some(extension_id) = extension_dir_name.to_str() { - if let Some(extension_id) = extension_dir_name.to_str() { - reload_tx.unbounded_send(Some(extension_id.into())).ok(); - } + reload_tx.unbounded_send(Some(extension_id.into())).ok(); } } } @@ -566,12 +562,12 @@ impl ExtensionStore { extensions .into_iter() .filter(|extension| { - this.extension_index.extensions.get(&extension.id).map_or( - true, - |installed_extension| { + this.extension_index + .extensions + .get(&extension.id) + .is_none_or(|installed_extension| { installed_extension.manifest.version != extension.manifest.version - }, - ) + }) }) .collect() }) @@ -763,8 +759,8 @@ impl ExtensionStore { if let ExtensionOperation::Install = operation { this.update( cx, |this, cx| { cx.emit(Event::ExtensionInstalled(extension_id.clone())); - if let Some(events) = ExtensionEvents::try_global(cx) { - if let Some(manifest) = this.extension_manifest_for_id(&extension_id) { + if let Some(events) = ExtensionEvents::try_global(cx) + && let Some(manifest) = this.extension_manifest_for_id(&extension_id) { events.update(cx, |this, cx| { this.emit( extension::Event::ExtensionInstalled(manifest.clone()), @@ -772,7 +768,6 @@ impl ExtensionStore { ) }); } - } }) .ok(); } @@ -912,12 +907,12 @@ impl ExtensionStore { extension_store.update(cx, |_, cx| { cx.emit(Event::ExtensionUninstalled(extension_id.clone())); - if let Some(events) = ExtensionEvents::try_global(cx) { - if let Some(manifest) = extension_manifest { - events.update(cx, |this, cx| { - this.emit(extension::Event::ExtensionUninstalled(manifest.clone()), cx) - }); - } + if let Some(events) = ExtensionEvents::try_global(cx) + && let Some(manifest) = extension_manifest + { + events.update(cx, |this, cx| { + this.emit(extension::Event::ExtensionUninstalled(manifest.clone()), cx) + }); } })?; @@ -997,12 +992,12 @@ impl ExtensionStore { this.update(cx, |this, cx| this.reload(None, cx))?.await; this.update(cx, |this, cx| { cx.emit(Event::ExtensionInstalled(extension_id.clone())); - if let Some(events) = ExtensionEvents::try_global(cx) { - if let Some(manifest) = this.extension_manifest_for_id(&extension_id) { - events.update(cx, |this, cx| { - this.emit(extension::Event::ExtensionInstalled(manifest.clone()), cx) - }); - } + if let Some(events) = ExtensionEvents::try_global(cx) + && let Some(manifest) = this.extension_manifest_for_id(&extension_id) + { + events.update(cx, |this, cx| { + this.emit(extension::Event::ExtensionInstalled(manifest.clone()), cx) + }); } })?; @@ -1118,15 +1113,17 @@ impl ExtensionStore 
{ extensions_to_unload.len() - reload_count ); - for extension_id in &extensions_to_load { - if let Some(extension) = new_index.extensions.get(extension_id) { - telemetry::event!( - "Extension Loaded", - extension_id, - version = extension.manifest.version - ); - } - } + let extension_ids = extensions_to_load + .iter() + .filter_map(|id| { + Some(( + id.clone(), + new_index.extensions.get(id)?.manifest.version.clone(), + )) + }) + .collect::>(); + + telemetry::event!("Extensions Loaded", id_and_versions = extension_ids); let themes_to_remove = old_index .themes @@ -1178,22 +1175,18 @@ impl ExtensionStore { } } - for (server_id, _) in &extension.manifest.context_servers { + for server_id in extension.manifest.context_servers.keys() { self.proxy.unregister_context_server(server_id.clone(), cx); } - for (adapter, _) in &extension.manifest.debug_adapters { + for adapter in extension.manifest.debug_adapters.keys() { self.proxy.unregister_debug_adapter(adapter.clone()); } - for (locator, _) in &extension.manifest.debug_locators { + for locator in extension.manifest.debug_locators.keys() { self.proxy.unregister_debug_locator(locator.clone()); } - for (command_name, _) in &extension.manifest.slash_commands { + for command_name in extension.manifest.slash_commands.keys() { self.proxy.unregister_slash_command(command_name.clone()); } - for (provider_id, _) in &extension.manifest.indexed_docs_providers { - self.proxy - .unregister_indexed_docs_provider(provider_id.clone()); - } } self.wasm_extensions @@ -1277,6 +1270,7 @@ impl ExtensionStore { queries, context_provider, toolchain_provider: None, + manifest_name: None, }) }), ); @@ -1342,7 +1336,7 @@ impl ExtensionStore { &extension_path, &extension.manifest, wasm_host.clone(), - &cx, + cx, ) .await .with_context(|| format!("Loading extension from {extension_path:?}")); @@ -1392,16 +1386,11 @@ impl ExtensionStore { ); } - for (id, _context_server_entry) in &manifest.context_servers { + for id in manifest.context_servers.keys() { this.proxy .register_context_server(extension.clone(), id.clone(), cx); } - for (provider_id, _provider) in &manifest.indexed_docs_providers { - this.proxy - .register_indexed_docs_provider(extension.clone(), provider_id.clone()); - } - for (debug_adapter, meta) in &manifest.debug_adapters { let mut path = root_dir.clone(); path.push(Path::new(manifest.id.as_ref())); @@ -1462,7 +1451,7 @@ impl ExtensionStore { if extension_dir .file_name() - .map_or(false, |file_name| file_name == ".DS_Store") + .is_some_and(|file_name| file_name == ".DS_Store") { continue; } @@ -1686,9 +1675,8 @@ impl ExtensionStore { let schema_path = &extension::build_debug_adapter_schema_path(adapter_name, meta); if fs.is_file(&src_dir.join(schema_path)).await { - match schema_path.parent() { - Some(parent) => fs.create_dir(&tmp_dir.join(parent)).await?, - None => {} + if let Some(parent) = schema_path.parent() { + fs.create_dir(&tmp_dir.join(parent)).await? 
} fs.copy_file( &src_dir.join(schema_path), @@ -1782,7 +1770,7 @@ impl ExtensionStore { })?; for client in clients { - Self::sync_extensions_over_ssh(&this, client, cx) + Self::sync_extensions_over_ssh(this, client, cx) .await .log_err(); } @@ -1794,10 +1782,10 @@ impl ExtensionStore { let connection_options = client.read(cx).connection_options(); let ssh_url = connection_options.ssh_url(); - if let Some(existing_client) = self.ssh_clients.get(&ssh_url) { - if existing_client.upgrade().is_some() { - return; - } + if let Some(existing_client) = self.ssh_clients.get(&ssh_url) + && existing_client.upgrade().is_some() + { + return; } self.ssh_clients.insert(ssh_url, client.downgrade()); diff --git a/crates/extension_host/src/extension_store_test.rs b/crates/extension_host/src/extension_store_test.rs index c31774c20d3e94f829e8de5d6ca822228735ca18..347a610439c98ae020a7ebf190dd9e1a603df5a1 100644 --- a/crates/extension_host/src/extension_store_test.rs +++ b/crates/extension_host/src/extension_store_test.rs @@ -160,7 +160,6 @@ async fn test_extension_store(cx: &mut TestAppContext) { language_servers: BTreeMap::default(), context_servers: BTreeMap::default(), slash_commands: BTreeMap::default(), - indexed_docs_providers: BTreeMap::default(), snippets: None, capabilities: Vec::new(), debug_adapters: Default::default(), @@ -191,7 +190,6 @@ async fn test_extension_store(cx: &mut TestAppContext) { language_servers: BTreeMap::default(), context_servers: BTreeMap::default(), slash_commands: BTreeMap::default(), - indexed_docs_providers: BTreeMap::default(), snippets: None, capabilities: Vec::new(), debug_adapters: Default::default(), @@ -371,7 +369,6 @@ async fn test_extension_store(cx: &mut TestAppContext) { language_servers: BTreeMap::default(), context_servers: BTreeMap::default(), slash_commands: BTreeMap::default(), - indexed_docs_providers: BTreeMap::default(), snippets: None, capabilities: Vec::new(), debug_adapters: Default::default(), diff --git a/crates/extension_host/src/headless_host.rs b/crates/extension_host/src/headless_host.rs index adc9638c2998eb1f122df5137577ca7e0cf4c975..a6305118cd3355f69a42914ec86bb5edcfc74810 100644 --- a/crates/extension_host/src/headless_host.rs +++ b/crates/extension_host/src/headless_host.rs @@ -163,6 +163,7 @@ impl HeadlessExtensionStore { queries: LanguageQueries::default(), context_provider: None, toolchain_provider: None, + manifest_name: None, }) }), ); @@ -174,7 +175,7 @@ impl HeadlessExtensionStore { } let wasm_extension: Arc = - Arc::new(WasmExtension::load(&extension_dir, &manifest, wasm_host.clone(), &cx).await?); + Arc::new(WasmExtension::load(&extension_dir, &manifest, wasm_host.clone(), cx).await?); for (language_server_id, language_server_config) in &manifest.language_servers { for language in language_server_config.languages() { diff --git a/crates/extension_host/src/wasm_host.rs b/crates/extension_host/src/wasm_host.rs index d990b670f49221aca2f0af901293c70d341cf029..c5bc21fc1c44659b845b7616aa1714a0872f90f3 100644 --- a/crates/extension_host/src/wasm_host.rs +++ b/crates/extension_host/src/wasm_host.rs @@ -532,7 +532,7 @@ fn wasm_engine(executor: &BackgroundExecutor) -> wasmtime::Engine { // `Future::poll`. const EPOCH_INTERVAL: Duration = Duration::from_millis(100); let mut timer = Timer::interval(EPOCH_INTERVAL); - while let Some(_) = timer.next().await { + while (timer.next().await).is_some() { // Exit the loop and thread once the engine is dropped. 
let Some(engine) = engine_ref.upgrade() else { break; @@ -701,16 +701,15 @@ pub fn parse_wasm_extension_version( for part in wasmparser::Parser::new(0).parse_all(wasm_bytes) { if let wasmparser::Payload::CustomSection(s) = part.context("error parsing wasm extension")? + && s.name() == "zed:api-version" { - if s.name() == "zed:api-version" { - version = parse_wasm_extension_version_custom_section(s.data()); - if version.is_none() { - bail!( - "extension {} has invalid zed:api-version section: {:?}", - extension_id, - s.data() - ); - } + version = parse_wasm_extension_version_custom_section(s.data()); + if version.is_none() { + bail!( + "extension {} has invalid zed:api-version section: {:?}", + extension_id, + s.data() + ); } } } diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs index 767b9033ade3c81c6ac149363676513c72996b7e..84794d5386eda1517808d181eb259a3264f7b82d 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs @@ -938,7 +938,7 @@ impl ExtensionImports for WasmState { binary: settings.binary.map(|binary| settings::CommandSettings { path: binary.path, arguments: binary.arguments, - env: binary.env, + env: binary.env.map(|env| env.into_iter().collect()), }), settings: settings.settings, initialization_options: settings.initialization_options, diff --git a/crates/extensions_ui/src/components/feature_upsell.rs b/crates/extensions_ui/src/components/feature_upsell.rs index e2e65f1598ad1a22a3fe854425a32013115616d6..0515dd46d30ce9f7e87331f99542940c3efa837a 100644 --- a/crates/extensions_ui/src/components/feature_upsell.rs +++ b/crates/extensions_ui/src/components/feature_upsell.rs @@ -58,10 +58,9 @@ impl RenderOnce for FeatureUpsell { el.child( Button::new("open_docs", "View Documentation") .icon(IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) + .icon_size(IconSize::Small) .icon_position(IconPosition::End) .on_click({ - let docs_url = docs_url.clone(); move |_event, _window, cx| { telemetry::event!( "Documentation Viewed", diff --git a/crates/extensions_ui/src/extensions_ui.rs b/crates/extensions_ui/src/extensions_ui.rs index fe3e94f5c20dc1a78ae01defc24e290c18a1a3e6..fd504764b65826ea74e092ea4c11d5576fa51524 100644 --- a/crates/extensions_ui/src/extensions_ui.rs +++ b/crates/extensions_ui/src/extensions_ui.rs @@ -116,6 +116,7 @@ pub fn init(cx: &mut App) { files: false, directories: true, multiple: false, + prompt: None, }, DirectoryLister::Local( workspace.project().clone(), @@ -693,7 +694,7 @@ impl ExtensionsPage { cx.open_url(&repository_url); } })) - .tooltip(Tooltip::text(repository_url.clone())) + .tooltip(Tooltip::text(repository_url)) })), ) } @@ -703,7 +704,7 @@ impl ExtensionsPage { extension: &ExtensionMetadata, cx: &mut Context, ) -> ExtensionCard { - let this = cx.entity().clone(); + let this = cx.entity(); let status = Self::extension_status(&extension.id, cx); let has_dev_extension = Self::dev_extension_exists(&extension.id, cx); @@ -826,7 +827,7 @@ impl ExtensionsPage { cx.open_url(&repository_url); } })) - .tooltip(Tooltip::text(repository_url.clone())), + .tooltip(Tooltip::text(repository_url)), ) .child( PopoverMenu::new(SharedString::from(format!( @@ -862,7 +863,7 @@ impl ExtensionsPage { window: &mut Window, cx: &mut App, ) -> Entity { - let context_menu = ContextMenu::build(window, cx, |context_menu, window, _| { + ContextMenu::build(window, cx, |context_menu, window, _| { context_menu .entry( "Install Another 
Version...", @@ -886,9 +887,7 @@ impl ExtensionsPage { cx.write_to_clipboard(ClipboardItem::new_string(authors.join(", "))); } }) - }); - - context_menu + }) } fn show_extension_version_list( @@ -1030,15 +1029,14 @@ impl ExtensionsPage { .read(cx) .extension_manifest_for_id(&extension_id) .cloned() + && let Some(events) = extension::ExtensionEvents::try_global(cx) { - if let Some(events) = extension::ExtensionEvents::try_global(cx) { - events.update(cx, |this, cx| { - this.emit( - extension::Event::ConfigureExtensionRequested(manifest), - cx, - ) - }); - } + events.update(cx, |this, cx| { + this.emit( + extension::Event::ConfigureExtensionRequested(manifest), + cx, + ) + }); } } }) diff --git a/crates/feature_flags/src/feature_flags.rs b/crates/feature_flags/src/feature_flags.rs index ef357adf35997bfb7560f1e1849ef69e780cd1f9..422979c4297cc72fdf2bf1d14cfba433d155c80e 100644 --- a/crates/feature_flags/src/feature_flags.rs +++ b/crates/feature_flags/src/feature_flags.rs @@ -14,7 +14,7 @@ struct FeatureFlags { } pub static ZED_DISABLE_STAFF: LazyLock = LazyLock::new(|| { - std::env::var("ZED_DISABLE_STAFF").map_or(false, |value| !value.is_empty() && value != "0") + std::env::var("ZED_DISABLE_STAFF").is_ok_and(|value| !value.is_empty() && value != "0") }); impl FeatureFlags { @@ -77,14 +77,6 @@ impl FeatureFlag for NotebookFeatureFlag { const NAME: &'static str = "notebooks"; } -pub struct ThreadAutoCaptureFeatureFlag {} -impl FeatureFlag for ThreadAutoCaptureFeatureFlag { - const NAME: &'static str = "thread-auto-capture"; - - fn enabled_for_staff() -> bool { - false - } -} pub struct PanicFeatureFlag; impl FeatureFlag for PanicFeatureFlag { @@ -97,10 +89,21 @@ impl FeatureFlag for JjUiFeatureFlag { const NAME: &'static str = "jj-ui"; } -pub struct AcpFeatureFlag; +pub struct GeminiAndNativeFeatureFlag; + +impl FeatureFlag for GeminiAndNativeFeatureFlag { + // This was previously called "acp". + // + // We renamed it because existing builds used it to enable the Claude Code + // integration too, and we'd like to turn Gemini/Native on in new builds + // without enabling Claude Code in old builds. 
+ const NAME: &'static str = "gemini-and-native"; +} + +pub struct ClaudeCodeFeatureFlag; -impl FeatureFlag for AcpFeatureFlag { - const NAME: &'static str = "acp"; +impl FeatureFlag for ClaudeCodeFeatureFlag { + const NAME: &'static str = "claude-code"; } pub trait FeatureFlagViewExt { diff --git a/crates/feedback/src/system_specs.rs b/crates/feedback/src/system_specs.rs index 7c002d90e94ed5c44a1076aac2788fc8d1150eaa..87642ab9294b3dd4faa32f368056a136f4b79e18 100644 --- a/crates/feedback/src/system_specs.rs +++ b/crates/feedback/src/system_specs.rs @@ -31,7 +31,7 @@ impl SystemSpecs { let architecture = env::consts::ARCH; let commit_sha = match release_channel { ReleaseChannel::Dev | ReleaseChannel::Nightly => { - AppCommitSha::try_global(cx).map(|sha| sha.full().clone()) + AppCommitSha::try_global(cx).map(|sha| sha.full()) } _ => None, }; @@ -135,7 +135,7 @@ impl Display for SystemSpecs { fn try_determine_available_gpus() -> Option { #[cfg(any(target_os = "linux", target_os = "freebsd"))] { - return std::process::Command::new("vulkaninfo") + std::process::Command::new("vulkaninfo") .args(&["--summary"]) .output() .ok() @@ -150,11 +150,11 @@ fn try_determine_available_gpus() -> Option { ] .join("\n") }) - .or(Some("Failed to run `vulkaninfo --summary`".to_string())); + .or(Some("Failed to run `vulkaninfo --summary`".to_string())) } #[cfg(not(any(target_os = "linux", target_os = "freebsd")))] { - return None; + None } } diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index e5ac70bb583be004941eee06476dc9318de1adc4..8aaaa047292065a9db0c47f980e559ca61c04546 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -17,7 +17,7 @@ use fuzzy::{CharBag, PathMatch, PathMatchCandidate}; use gpui::{ Action, AnyElement, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, KeyContext, Modifiers, ModifiersChangedEvent, ParentElement, Render, Styled, Task, WeakEntity, - Window, actions, + Window, actions, rems, }; use open_path_prompt::OpenPathPrompt; use picker::{Picker, PickerDelegate}; @@ -209,11 +209,11 @@ impl FileFinder { let Some(init_modifiers) = self.init_modifiers.take() else { return; }; - if self.picker.read(cx).delegate.has_changed_selected_index { - if !event.modified() || !init_modifiers.is_subset_of(&event) { - self.init_modifiers = None; - window.dispatch_action(menu::Confirm.boxed_clone(), cx); - } + if self.picker.read(cx).delegate.has_changed_selected_index + && (!event.modified() || !init_modifiers.is_subset_of(event)) + { + self.init_modifiers = None; + window.dispatch_action(menu::Confirm.boxed_clone(), cx); } } @@ -267,10 +267,9 @@ impl FileFinder { ) { self.picker.update(cx, |picker, cx| { picker.delegate.include_ignored = match picker.delegate.include_ignored { - Some(true) => match FileFinderSettings::get_global(cx).include_ignored { - Some(_) => Some(false), - None => None, - }, + Some(true) => FileFinderSettings::get_global(cx) + .include_ignored + .map(|_| false), Some(false) => Some(true), None => Some(true), }; @@ -323,34 +322,34 @@ impl FileFinder { ) { self.picker.update(cx, |picker, cx| { let delegate = &mut picker.delegate; - if let Some(workspace) = delegate.workspace.upgrade() { - if let Some(m) = delegate.matches.get(delegate.selected_index()) { - let path = match &m { - Match::History { path, .. 
} => { - let worktree_id = path.project.worktree_id; - ProjectPath { - worktree_id, - path: Arc::clone(&path.project.path), - } + if let Some(workspace) = delegate.workspace.upgrade() + && let Some(m) = delegate.matches.get(delegate.selected_index()) + { + let path = match &m { + Match::History { path, .. } => { + let worktree_id = path.project.worktree_id; + ProjectPath { + worktree_id, + path: Arc::clone(&path.project.path), } - Match::Search(m) => ProjectPath { - worktree_id: WorktreeId::from_usize(m.0.worktree_id), - path: m.0.path.clone(), - }, - Match::CreateNew(p) => p.clone(), - }; - let open_task = workspace.update(cx, move |workspace, cx| { - workspace.split_path_preview(path, false, Some(split_direction), window, cx) - }); - open_task.detach_and_log_err(cx); - } + } + Match::Search(m) => ProjectPath { + worktree_id: WorktreeId::from_usize(m.0.worktree_id), + path: m.0.path.clone(), + }, + Match::CreateNew(p) => p.clone(), + }; + let open_task = workspace.update(cx, move |workspace, cx| { + workspace.split_path_preview(path, false, Some(split_direction), window, cx) + }); + open_task.detach_and_log_err(cx); } }) } pub fn modal_max_width(width_setting: Option, window: &mut Window) -> Pixels { let window_width = window.viewport_size().width; - let small_width = Pixels(545.); + let small_width = rems(34.).to_pixels(window.rem_size()); match width_setting { None | Some(FileFinderWidth::Small) => small_width, @@ -497,7 +496,7 @@ impl Match { fn panel_match(&self) -> Option<&ProjectPanelOrdMatch> { match self { Match::History { panel_match, .. } => panel_match.as_ref(), - Match::Search(panel_match) => Some(&panel_match), + Match::Search(panel_match) => Some(panel_match), Match::CreateNew(_) => None, } } @@ -537,7 +536,7 @@ impl Matches { self.matches.binary_search_by(|m| { // `reverse()` since if cmp_matches(a, b) == Ordering::Greater, then a is better than b. // And we want the better entries go first. - Self::cmp_matches(self.separate_history, currently_opened, &m, &entry).reverse() + Self::cmp_matches(self.separate_history, currently_opened, m, entry).reverse() }) } } @@ -675,17 +674,17 @@ impl Matches { let path_str = panel_match.0.path.to_string_lossy(); let filename_str = filename.to_string_lossy(); - if let Some(filename_pos) = path_str.rfind(&*filename_str) { - if panel_match.0.positions[0] >= filename_pos { - let mut prev_position = panel_match.0.positions[0]; - for p in &panel_match.0.positions[1..] { - if *p != prev_position + 1 { - return false; - } - prev_position = *p; + if let Some(filename_pos) = path_str.rfind(&*filename_str) + && panel_match.0.positions[0] >= filename_pos + { + let mut prev_position = panel_match.0.positions[0]; + for p in &panel_match.0.positions[1..] 
{ + if *p != prev_position + 1 { + return false; } - return true; + prev_position = *p; } + return true; } } @@ -878,9 +877,7 @@ impl FileFinderDelegate { PathMatchCandidateSet { snapshot: worktree.snapshot(), include_ignored: self.include_ignored.unwrap_or_else(|| { - worktree - .root_entry() - .map_or(false, |entry| entry.is_ignored) + worktree.root_entry().is_some_and(|entry| entry.is_ignored) }), include_root_name, candidates: project::Candidates::Files, @@ -1045,10 +1042,10 @@ impl FileFinderDelegate { ) } else { let mut path = Arc::clone(project_relative_path); - if project_relative_path.as_ref() == Path::new("") { - if let Some(absolute_path) = &entry_path.absolute { - path = Arc::from(absolute_path.as_path()); - } + if project_relative_path.as_ref() == Path::new("") + && let Some(absolute_path) = &entry_path.absolute + { + path = Arc::from(absolute_path.as_path()); } let mut path_match = PathMatch { @@ -1078,23 +1075,21 @@ impl FileFinderDelegate { ), }; - if file_name_positions.is_empty() { - if let Some(user_home_path) = std::env::var("HOME").ok() { - let user_home_path = user_home_path.trim(); - if !user_home_path.is_empty() { - if (&full_path).starts_with(user_home_path) { - full_path.replace_range(0..user_home_path.len(), "~"); - full_path_positions.retain_mut(|pos| { - if *pos >= user_home_path.len() { - *pos -= user_home_path.len(); - *pos += 1; - true - } else { - false - } - }) + if file_name_positions.is_empty() + && let Some(user_home_path) = std::env::var("HOME").ok() + { + let user_home_path = user_home_path.trim(); + if !user_home_path.is_empty() && full_path.starts_with(user_home_path) { + full_path.replace_range(0..user_home_path.len(), "~"); + full_path_positions.retain_mut(|pos| { + if *pos >= user_home_path.len() { + *pos -= user_home_path.len(); + *pos += 1; + true + } else { + false } - } + }) } } @@ -1242,14 +1237,13 @@ impl FileFinderDelegate { /// Skips first history match (that is displayed topmost) if it's currently opened. fn calculate_selected_index(&self, cx: &mut Context>) -> usize { - if FileFinderSettings::get_global(cx).skip_focus_for_active_in_search { - if let Some(Match::History { path, .. }) = self.matches.get(0) { - if Some(path) == self.currently_opened_path.as_ref() { - let elements_after_first = self.matches.len() - 1; - if elements_after_first > 0 { - return 1; - } - } + if FileFinderSettings::get_global(cx).skip_focus_for_active_in_search + && let Some(Match::History { path, .. }) = self.matches.get(0) + && Some(path) == self.currently_opened_path.as_ref() + { + let elements_after_first = self.matches.len() - 1; + if elements_after_first > 0 { + return 1; } } @@ -1310,10 +1304,10 @@ impl PickerDelegate for FileFinderDelegate { .enumerate() .find(|(_, m)| !matches!(m, Match::History { .. 
})) .map(|(i, _)| i); - if let Some(first_non_history_index) = first_non_history_index { - if first_non_history_index > 0 { - return vec![first_non_history_index - 1]; - } + if let Some(first_non_history_index) = first_non_history_index + && first_non_history_index > 0 + { + return vec![first_non_history_index - 1]; } } Vec::new() @@ -1402,7 +1396,7 @@ impl PickerDelegate for FileFinderDelegate { cx.notify(); Task::ready(()) } else { - let path_position = PathWithPosition::parse_str(&raw_query); + let path_position = PathWithPosition::parse_str(raw_query); #[cfg(windows)] let raw_query = raw_query.trim().to_owned().replace("/", "\\"); @@ -1436,69 +1430,101 @@ impl PickerDelegate for FileFinderDelegate { window: &mut Window, cx: &mut Context>, ) { - if let Some(m) = self.matches.get(self.selected_index()) { - if let Some(workspace) = self.workspace.upgrade() { - let open_task = workspace.update(cx, |workspace, cx| { - let split_or_open = - |workspace: &mut Workspace, - project_path, - window: &mut Window, - cx: &mut Context| { - let allow_preview = - PreviewTabsSettings::get_global(cx).enable_preview_from_file_finder; - if secondary { - workspace.split_path_preview( - project_path, - allow_preview, - None, - window, - cx, - ) - } else { - workspace.open_path_preview( - project_path, - None, - true, - allow_preview, - true, - window, - cx, - ) - } - }; - match &m { - Match::CreateNew(project_path) => { - // Create a new file with the given filename - if secondary { - workspace.split_path_preview( - project_path.clone(), - false, - None, - window, - cx, - ) - } else { - workspace.open_path_preview( - project_path.clone(), - None, - true, - false, - true, - window, - cx, - ) - } + if let Some(m) = self.matches.get(self.selected_index()) + && let Some(workspace) = self.workspace.upgrade() + { + let open_task = workspace.update(cx, |workspace, cx| { + let split_or_open = + |workspace: &mut Workspace, + project_path, + window: &mut Window, + cx: &mut Context| { + let allow_preview = + PreviewTabsSettings::get_global(cx).enable_preview_from_file_finder; + if secondary { + workspace.split_path_preview( + project_path, + allow_preview, + None, + window, + cx, + ) + } else { + workspace.open_path_preview( + project_path, + None, + true, + allow_preview, + true, + window, + cx, + ) } + }; + match &m { + Match::CreateNew(project_path) => { + // Create a new file with the given filename + if secondary { + workspace.split_path_preview( + project_path.clone(), + false, + None, + window, + cx, + ) + } else { + workspace.open_path_preview( + project_path.clone(), + None, + true, + false, + true, + window, + cx, + ) + } + } - Match::History { path, .. } => { - let worktree_id = path.project.worktree_id; - if workspace - .project() - .read(cx) - .worktree_for_id(worktree_id, cx) - .is_some() - { - split_or_open( + Match::History { path, .. 
} => { + let worktree_id = path.project.worktree_id; + if workspace + .project() + .read(cx) + .worktree_for_id(worktree_id, cx) + .is_some() + { + split_or_open( + workspace, + ProjectPath { + worktree_id, + path: Arc::clone(&path.project.path), + }, + window, + cx, + ) + } else { + match path.absolute.as_ref() { + Some(abs_path) => { + if secondary { + workspace.split_abs_path( + abs_path.to_path_buf(), + false, + window, + cx, + ) + } else { + workspace.open_abs_path( + abs_path.to_path_buf(), + OpenOptions { + visible: Some(OpenVisible::None), + ..Default::default() + }, + window, + cx, + ) + } + } + None => split_or_open( workspace, ProjectPath { worktree_id, @@ -1506,88 +1532,52 @@ impl PickerDelegate for FileFinderDelegate { }, window, cx, - ) - } else { - match path.absolute.as_ref() { - Some(abs_path) => { - if secondary { - workspace.split_abs_path( - abs_path.to_path_buf(), - false, - window, - cx, - ) - } else { - workspace.open_abs_path( - abs_path.to_path_buf(), - OpenOptions { - visible: Some(OpenVisible::None), - ..Default::default() - }, - window, - cx, - ) - } - } - None => split_or_open( - workspace, - ProjectPath { - worktree_id, - path: Arc::clone(&path.project.path), - }, - window, - cx, - ), - } + ), } } - Match::Search(m) => split_or_open( - workspace, - ProjectPath { - worktree_id: WorktreeId::from_usize(m.0.worktree_id), - path: m.0.path.clone(), - }, - window, - cx, - ), } - }); + Match::Search(m) => split_or_open( + workspace, + ProjectPath { + worktree_id: WorktreeId::from_usize(m.0.worktree_id), + path: m.0.path.clone(), + }, + window, + cx, + ), + } + }); - let row = self - .latest_search_query - .as_ref() - .and_then(|query| query.path_position.row) - .map(|row| row.saturating_sub(1)); - let col = self - .latest_search_query - .as_ref() - .and_then(|query| query.path_position.column) - .unwrap_or(0) - .saturating_sub(1); - let finder = self.file_finder.clone(); - - cx.spawn_in(window, async move |_, cx| { - let item = open_task.await.notify_async_err(cx)?; - if let Some(row) = row { - if let Some(active_editor) = item.downcast::() { - active_editor - .downgrade() - .update_in(cx, |editor, window, cx| { - editor.go_to_singleton_buffer_point( - Point::new(row, col), - window, - cx, - ); - }) - .log_err(); - } - } - finder.update(cx, |_, cx| cx.emit(DismissEvent)).ok()?; + let row = self + .latest_search_query + .as_ref() + .and_then(|query| query.path_position.row) + .map(|row| row.saturating_sub(1)); + let col = self + .latest_search_query + .as_ref() + .and_then(|query| query.path_position.column) + .unwrap_or(0) + .saturating_sub(1); + let finder = self.file_finder.clone(); + + cx.spawn_in(window, async move |_, cx| { + let item = open_task.await.notify_async_err(cx)?; + if let Some(row) = row + && let Some(active_editor) = item.downcast::() + { + active_editor + .downgrade() + .update_in(cx, |editor, window, cx| { + editor.go_to_singleton_buffer_point(Point::new(row, col), window, cx); + }) + .log_err(); + } + finder.update(cx, |_, cx| cx.emit(DismissEvent)).ok()?; - Some(()) - }) - .detach(); - } + Some(()) + }) + .detach(); } } @@ -1759,7 +1749,7 @@ impl PickerDelegate for FileFinderDelegate { Some(ContextMenu::build(window, cx, { let focus_handle = focus_handle.clone(); move |menu, _, _| { - menu.context(focus_handle.clone()) + menu.context(focus_handle) .action( "Split Left", pane::SplitLeft.boxed_clone(), diff --git a/crates/file_finder/src/file_finder_tests.rs b/crates/file_finder/src/file_finder_tests.rs index 
db259ccef854b1d3c5c4fae3bc9ebad08e398891..8203d1b1fdf684c3dcbd1fb6c058a7b7e6bab9cb 100644 --- a/crates/file_finder/src/file_finder_tests.rs +++ b/crates/file_finder/src/file_finder_tests.rs @@ -1614,7 +1614,7 @@ async fn test_select_current_open_file_when_no_history(cx: &mut gpui::TestAppCon let picker = open_file_picker(&workspace, cx); picker.update(cx, |finder, _| { - assert_match_selection(&finder, 0, "1_qw"); + assert_match_selection(finder, 0, "1_qw"); }); } @@ -2623,7 +2623,7 @@ async fn open_queried_buffer( workspace: &Entity, cx: &mut gpui::VisualTestContext, ) -> Vec { - let picker = open_file_picker(&workspace, cx); + let picker = open_file_picker(workspace, cx); cx.simulate_input(input); let history_items = picker.update(cx, |finder, _| { diff --git a/crates/file_finder/src/open_path_prompt.rs b/crates/file_finder/src/open_path_prompt.rs index 68ba7a78b52fee42588b732d7a6a3c582a80061f..4625872e46c690701b304351c6648a8e380f181a 100644 --- a/crates/file_finder/src/open_path_prompt.rs +++ b/crates/file_finder/src/open_path_prompt.rs @@ -75,16 +75,16 @@ impl OpenPathDelegate { .. } => { let mut i = selected_match_index; - if let Some(user_input) = user_input { - if !user_input.exists || !user_input.is_dir { - if i == 0 { - return Some(CandidateInfo { - path: user_input.file.clone(), - is_dir: false, - }); - } else { - i -= 1; - } + if let Some(user_input) = user_input + && (!user_input.exists || !user_input.is_dir) + { + if i == 0 { + return Some(CandidateInfo { + path: user_input.file.clone(), + is_dir: false, + }); + } else { + i -= 1; } } let id = self.string_matches.get(i)?.candidate_id; @@ -112,7 +112,7 @@ impl OpenPathDelegate { entries, .. } => user_input - .into_iter() + .iter() .filter(|user_input| !user_input.exists || !user_input.is_dir) .map(|user_input| user_input.file.string.clone()) .chain(self.string_matches.iter().filter_map(|string_match| { @@ -637,7 +637,7 @@ impl PickerDelegate for OpenPathDelegate { FileIcons::get_folder_icon(false, cx)? } else { let path = path::Path::new(&candidate.path.string); - FileIcons::get_icon(&path, cx)? + FileIcons::get_icon(path, cx)? }; Some(Icon::from_path(icon).color(Color::Muted)) }); @@ -653,7 +653,7 @@ impl PickerDelegate for OpenPathDelegate { if parent_path == &self.prompt_root { format!("{}{}", self.prompt_root, candidate.path.string) } else { - candidate.path.string.clone() + candidate.path.string }, match_positions, )), @@ -684,7 +684,7 @@ impl PickerDelegate for OpenPathDelegate { }; StyledText::new(label) .with_default_highlights( - &window.text_style().clone(), + &window.text_style(), vec![( delta..delta + label_len, HighlightStyle::color(Color::Conflict.color(cx)), @@ -694,7 +694,7 @@ impl PickerDelegate for OpenPathDelegate { } else { StyledText::new(format!("{label} (create)")) .with_default_highlights( - &window.text_style().clone(), + &window.text_style(), vec![( delta..delta + label_len, HighlightStyle::color(Color::Created.color(cx)), @@ -728,7 +728,7 @@ impl PickerDelegate for OpenPathDelegate { .child(LabelLike::new().child(label_with_highlights)), ) } - DirectoryState::None { .. } => return None, + DirectoryState::None { .. 
} => None, } } diff --git a/crates/file_icons/src/file_icons.rs b/crates/file_icons/src/file_icons.rs index 2f159771b1af850577d18c2fc236aaa58cd71f10..42c00fb12d5e9f0fbb1662eb0941ed70d94382b5 100644 --- a/crates/file_icons/src/file_icons.rs +++ b/crates/file_icons/src/file_icons.rs @@ -33,13 +33,23 @@ impl FileIcons { // TODO: Associate a type with the languages and have the file's language // override these associations - // check if file name is in suffixes - // e.g. catch file named `eslint.config.js` instead of `.eslint.config.js` - if let Some(typ) = path.file_name().and_then(|typ| typ.to_str()) { + if let Some(mut typ) = path.file_name().and_then(|typ| typ.to_str()) { + // check if file name is in suffixes + // e.g. catch file named `eslint.config.js` instead of `.eslint.config.js` let maybe_path = get_icon_from_suffix(typ); if maybe_path.is_some() { return maybe_path; } + + // check if suffix based on first dot is in suffixes + // e.g. consider `module.js` as suffix to angular's module file named `auth.module.js` + while let Some((_, suffix)) = typ.split_once('.') { + let maybe_path = get_icon_from_suffix(suffix); + if maybe_path.is_some() { + return maybe_path; + } + typ = suffix; + } } // primary case: check if the files extension or the hidden file name @@ -62,7 +72,7 @@ impl FileIcons { return maybe_path; } } - return this.get_icon_for_type("default", cx); + this.get_icon_for_type("default", cx) } fn default_icon_theme(cx: &App) -> Option> { diff --git a/crates/fs/Cargo.toml b/crates/fs/Cargo.toml index 633fc1fc994910e74784bac7c1b67a68a5364255..1d4161134ee7ff43c15c450284a570a08d7841cd 100644 --- a/crates/fs/Cargo.toml +++ b/crates/fs/Cargo.toml @@ -51,6 +51,7 @@ ashpd.workspace = true [dev-dependencies] gpui = { workspace = true, features = ["test-support"] } +git = { workspace = true, features = ["test-support"] } [features] test-support = ["gpui/test-support", "git/test-support"] diff --git a/crates/fs/src/fake_git_repo.rs b/crates/fs/src/fake_git_repo.rs index 04ba656232d168b8d7b222c6bb714a9add6704c0..8a67eddcd775746f5dbfc55c3a9a21a5d1f7d8e3 100644 --- a/crates/fs/src/fake_git_repo.rs +++ b/crates/fs/src/fake_git_repo.rs @@ -1,8 +1,9 @@ -use crate::{FakeFs, Fs}; +use crate::{FakeFs, FakeFsEntry, Fs}; use anyhow::{Context as _, Result}; use collections::{HashMap, HashSet}; use futures::future::{self, BoxFuture, join_all}; use git::{ + Oid, blame::Blame, repository::{ AskPassDelegate, Branch, CommitDetails, CommitOptions, FetchOptions, GitRepository, @@ -10,8 +11,9 @@ use git::{ }, status::{FileStatus, GitStatus, StatusCode, TrackedStatus, UnmergedStatus}, }; -use gpui::{AsyncApp, BackgroundExecutor, SharedString}; +use gpui::{AsyncApp, BackgroundExecutor, SharedString, Task}; use ignore::gitignore::GitignoreBuilder; +use parking_lot::Mutex; use rope::Rope; use smol::future::FutureExt as _; use std::{path::PathBuf, sync::Arc}; @@ -19,6 +21,7 @@ use std::{path::PathBuf, sync::Arc}; #[derive(Clone)] pub struct FakeGitRepository { pub(crate) fs: Arc, + pub(crate) checkpoints: Arc>>, pub(crate) executor: BackgroundExecutor, pub(crate) dot_git_path: PathBuf, pub(crate) repository_dir_path: PathBuf, @@ -183,7 +186,7 @@ impl GitRepository for FakeGitRepository { async move { None }.boxed() } - fn status(&self, path_prefixes: &[RepoPath]) -> BoxFuture<'_, Result> { + fn status(&self, path_prefixes: &[RepoPath]) -> Task> { let workdir_path = self.dot_git_path.parent().unwrap(); // Load gitignores @@ -311,7 +314,10 @@ impl GitRepository for FakeGitRepository { entries: entries.into(), }) 
}); - async move { result? }.boxed() + Task::ready(match result { + Ok(result) => result, + Err(e) => Err(e), + }) } fn branches(&self) -> BoxFuture<'_, Result>> { @@ -339,7 +345,7 @@ impl GitRepository for FakeGitRepository { fn create_branch(&self, name: String) -> BoxFuture<'_, Result<()>> { self.with_state_async(true, move |state| { - state.branches.insert(name.to_owned()); + state.branches.insert(name); Ok(()) }) } @@ -402,11 +408,11 @@ impl GitRepository for FakeGitRepository { &self, _paths: Vec, _env: Arc>, - ) -> BoxFuture> { + ) -> BoxFuture<'_, Result<()>> { unimplemented!() } - fn stash_pop(&self, _env: Arc>) -> BoxFuture> { + fn stash_pop(&self, _env: Arc>) -> BoxFuture<'_, Result<()>> { unimplemented!() } @@ -466,22 +472,57 @@ impl GitRepository for FakeGitRepository { } fn checkpoint(&self) -> BoxFuture<'static, Result> { - unimplemented!() + let executor = self.executor.clone(); + let fs = self.fs.clone(); + let checkpoints = self.checkpoints.clone(); + let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf(); + async move { + executor.simulate_random_delay().await; + let oid = Oid::random(&mut executor.rng()); + let entry = fs.entry(&repository_dir_path)?; + checkpoints.lock().insert(oid, entry); + Ok(GitRepositoryCheckpoint { commit_sha: oid }) + } + .boxed() } - fn restore_checkpoint( - &self, - _checkpoint: GitRepositoryCheckpoint, - ) -> BoxFuture<'_, Result<()>> { - unimplemented!() + fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<'_, Result<()>> { + let executor = self.executor.clone(); + let fs = self.fs.clone(); + let checkpoints = self.checkpoints.clone(); + let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf(); + async move { + executor.simulate_random_delay().await; + let checkpoints = checkpoints.lock(); + let entry = checkpoints + .get(&checkpoint.commit_sha) + .context(format!("invalid checkpoint: {}", checkpoint.commit_sha))?; + fs.insert_entry(&repository_dir_path, entry.clone())?; + Ok(()) + } + .boxed() } fn compare_checkpoints( &self, - _left: GitRepositoryCheckpoint, - _right: GitRepositoryCheckpoint, + left: GitRepositoryCheckpoint, + right: GitRepositoryCheckpoint, ) -> BoxFuture<'_, Result> { - unimplemented!() + let executor = self.executor.clone(); + let checkpoints = self.checkpoints.clone(); + async move { + executor.simulate_random_delay().await; + let checkpoints = checkpoints.lock(); + let left = checkpoints + .get(&left.commit_sha) + .context(format!("invalid left checkpoint: {}", left.commit_sha))?; + let right = checkpoints + .get(&right.commit_sha) + .context(format!("invalid right checkpoint: {}", right.commit_sha))?; + + Ok(left == right) + } + .boxed() } fn diff_checkpoints( @@ -496,3 +537,63 @@ impl GitRepository for FakeGitRepository { unimplemented!() } } + +#[cfg(test)] +mod tests { + use crate::{FakeFs, Fs}; + use gpui::BackgroundExecutor; + use serde_json::json; + use std::path::Path; + use util::path; + + #[gpui::test] + async fn test_checkpoints(executor: BackgroundExecutor) { + let fs = FakeFs::new(executor); + fs.insert_tree( + path!("/"), + json!({ + "bar": { + "baz": "qux" + }, + "foo": { + ".git": {}, + "a": "lorem", + "b": "ipsum", + }, + }), + ) + .await; + fs.with_git_state(Path::new("/foo/.git"), true, |_git| {}) + .unwrap(); + let repository = fs.open_repo(Path::new("/foo/.git")).unwrap(); + + let checkpoint_1 = repository.checkpoint().await.unwrap(); + fs.write(Path::new("/foo/b"), b"IPSUM").await.unwrap(); + 
fs.write(Path::new("/foo/c"), b"dolor").await.unwrap(); + let checkpoint_2 = repository.checkpoint().await.unwrap(); + let checkpoint_3 = repository.checkpoint().await.unwrap(); + + assert!( + repository + .compare_checkpoints(checkpoint_2.clone(), checkpoint_3.clone()) + .await + .unwrap() + ); + assert!( + !repository + .compare_checkpoints(checkpoint_1.clone(), checkpoint_2.clone()) + .await + .unwrap() + ); + + repository.restore_checkpoint(checkpoint_1).await.unwrap(); + assert_eq!( + fs.files_with_contents(Path::new("")), + [ + (Path::new(path!("/bar/baz")).into(), b"qux".into()), + (Path::new(path!("/foo/a")).into(), b"lorem".into()), + (Path::new(path!("/foo/b")).into(), b"ipsum".into()) + ] + ); + } +} diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index a76ccee2bf8374429f57f28f729d5e4e44046ec9..75312c5c0cec4a4b285ff0320d90eeda1c0a4c6a 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -12,7 +12,7 @@ use gpui::BackgroundExecutor; use gpui::Global; use gpui::ReadGlobal as _; use std::borrow::Cow; -use util::command::new_std_command; +use util::command::{new_smol_command, new_std_command}; #[cfg(unix)] use std::os::fd::{AsFd, AsRawFd}; @@ -134,6 +134,7 @@ pub trait Fs: Send + Sync { fn home_dir(&self) -> Option; fn open_repo(&self, abs_dot_git: &Path) -> Option>; fn git_init(&self, abs_work_directory: &Path, fallback_branch_name: String) -> Result<()>; + async fn git_clone(&self, repo_url: &str, abs_work_directory: &Path) -> Result<()>; fn is_fake(&self) -> bool; async fn is_case_sensitive(&self) -> Result; @@ -419,18 +420,19 @@ impl Fs for RealFs { async fn remove_file(&self, path: &Path, options: RemoveOptions) -> Result<()> { #[cfg(windows)] - if let Ok(Some(metadata)) = self.metadata(path).await { - if metadata.is_symlink && metadata.is_dir { - self.remove_dir( - path, - RemoveOptions { - recursive: false, - ignore_if_not_exists: true, - }, - ) - .await?; - return Ok(()); - } + if let Ok(Some(metadata)) = self.metadata(path).await + && metadata.is_symlink + && metadata.is_dir + { + self.remove_dir( + path, + RemoveOptions { + recursive: false, + ignore_if_not_exists: true, + }, + ) + .await?; + return Ok(()); } match smol::fs::remove_file(path).await { @@ -466,11 +468,11 @@ impl Fs for RealFs { #[cfg(any(target_os = "linux", target_os = "freebsd"))] async fn trash_file(&self, path: &Path, _options: RemoveOptions) -> Result<()> { - if let Ok(Some(metadata)) = self.metadata(path).await { - if metadata.is_symlink { - // TODO: trash_file does not support trashing symlinks yet - https://github.com/bilelmoussaoui/ashpd/issues/255 - return self.remove_file(path, RemoveOptions::default()).await; - } + if let Ok(Some(metadata)) = self.metadata(path).await + && metadata.is_symlink + { + // TODO: trash_file does not support trashing symlinks yet - https://github.com/bilelmoussaoui/ashpd/issues/255 + return self.remove_file(path, RemoveOptions::default()).await; } let file = smol::fs::File::open(path).await?; match trash::trash_file(&file.as_fd()).await { @@ -623,13 +625,13 @@ impl Fs for RealFs { async fn is_file(&self, path: &Path) -> bool { smol::fs::metadata(path) .await - .map_or(false, |metadata| metadata.is_file()) + .is_ok_and(|metadata| metadata.is_file()) } async fn is_dir(&self, path: &Path) -> bool { smol::fs::metadata(path) .await - .map_or(false, |metadata| metadata.is_dir()) + .is_ok_and(|metadata| metadata.is_dir()) } async fn metadata(&self, path: &Path) -> Result> { @@ -765,24 +767,23 @@ impl Fs for RealFs { let pending_paths: Arc>> = Default::default(); 
let watcher = Arc::new(fs_watcher::FsWatcher::new(tx, pending_paths.clone())); - if watcher.add(path).is_err() { - // If the path doesn't exist yet (e.g. settings.json), watch the parent dir to learn when it's created. - if let Some(parent) = path.parent() { - if let Err(e) = watcher.add(parent) { - log::warn!("Failed to watch: {e}"); - } - } + // If the path doesn't exist yet (e.g. settings.json), watch the parent dir to learn when it's created. + if watcher.add(path).is_err() + && let Some(parent) = path.parent() + && let Err(e) = watcher.add(parent) + { + log::warn!("Failed to watch: {e}"); } // Check if path is a symlink and follow the target parent - if let Some(mut target) = self.read_link(&path).await.ok() { + if let Some(mut target) = self.read_link(path).await.ok() { // Check if symlink target is relative path, if so make it absolute - if target.is_relative() { - if let Some(parent) = path.parent() { - target = parent.join(target); - if let Ok(canonical) = self.canonicalize(&target).await { - target = SanitizedPath::from(canonical).as_path().to_path_buf(); - } + if target.is_relative() + && let Some(parent) = path.parent() + { + target = parent.join(target); + if let Ok(canonical) = self.canonicalize(&target).await { + target = SanitizedPath::from(canonical).as_path().to_path_buf(); } } watcher.add(&target).ok(); @@ -839,6 +840,23 @@ impl Fs for RealFs { Ok(()) } + async fn git_clone(&self, repo_url: &str, abs_work_directory: &Path) -> Result<()> { + let output = new_smol_command("git") + .current_dir(abs_work_directory) + .args(&["clone", repo_url]) + .output() + .await?; + + if !output.status.success() { + anyhow::bail!( + "git clone failed: {}", + String::from_utf8_lossy(&output.stderr) + ); + } + + Ok(()) + } + fn is_fake(&self) -> bool { false } @@ -906,7 +924,7 @@ pub struct FakeFs { #[cfg(any(test, feature = "test-support"))] struct FakeFsState { - root: Arc>, + root: FakeFsEntry, next_inode: u64, next_mtime: SystemTime, git_event_tx: smol::channel::Sender, @@ -921,7 +939,7 @@ struct FakeFsState { } #[cfg(any(test, feature = "test-support"))] -#[derive(Debug)] +#[derive(Clone, Debug)] enum FakeFsEntry { File { inode: u64, @@ -935,7 +953,7 @@ enum FakeFsEntry { inode: u64, mtime: MTime, len: u64, - entries: BTreeMap>>, + entries: BTreeMap, git_repo_state: Option>>, }, Symlink { @@ -943,6 +961,67 @@ enum FakeFsEntry { }, } +#[cfg(any(test, feature = "test-support"))] +impl PartialEq for FakeFsEntry { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + ( + Self::File { + inode: l_inode, + mtime: l_mtime, + len: l_len, + content: l_content, + git_dir_path: l_git_dir_path, + }, + Self::File { + inode: r_inode, + mtime: r_mtime, + len: r_len, + content: r_content, + git_dir_path: r_git_dir_path, + }, + ) => { + l_inode == r_inode + && l_mtime == r_mtime + && l_len == r_len + && l_content == r_content + && l_git_dir_path == r_git_dir_path + } + ( + Self::Dir { + inode: l_inode, + mtime: l_mtime, + len: l_len, + entries: l_entries, + git_repo_state: l_git_repo_state, + }, + Self::Dir { + inode: r_inode, + mtime: r_mtime, + len: r_len, + entries: r_entries, + git_repo_state: r_git_repo_state, + }, + ) => { + let same_repo_state = match (l_git_repo_state.as_ref(), r_git_repo_state.as_ref()) { + (Some(l), Some(r)) => Arc::ptr_eq(l, r), + (None, None) => true, + _ => false, + }; + l_inode == r_inode + && l_mtime == r_mtime + && l_len == r_len + && l_entries == r_entries + && same_repo_state + } + (Self::Symlink { target: l_target }, Self::Symlink { target: r_target }) => { 
+ l_target == r_target + } + _ => false, + } + } +} + #[cfg(any(test, feature = "test-support"))] impl FakeFsState { fn get_and_increment_mtime(&mut self) -> MTime { @@ -957,25 +1036,9 @@ impl FakeFsState { inode } - fn read_path(&self, target: &Path) -> Result>> { - Ok(self - .try_read_path(target, true) - .ok_or_else(|| { - anyhow!(io::Error::new( - io::ErrorKind::NotFound, - format!("not found: {target:?}") - )) - })? - .0) - } - - fn try_read_path( - &self, - target: &Path, - follow_symlink: bool, - ) -> Option<(Arc>, PathBuf)> { - let mut path = target.to_path_buf(); + fn canonicalize(&self, target: &Path, follow_symlink: bool) -> Option { let mut canonical_path = PathBuf::new(); + let mut path = target.to_path_buf(); let mut entry_stack = Vec::new(); 'outer: loop { let mut path_components = path.components().peekable(); @@ -985,7 +1048,7 @@ impl FakeFsState { Component::Prefix(prefix_component) => prefix = Some(prefix_component), Component::RootDir => { entry_stack.clear(); - entry_stack.push(self.root.clone()); + entry_stack.push(&self.root); canonical_path.clear(); match prefix { Some(prefix_component) => { @@ -1002,20 +1065,18 @@ impl FakeFsState { canonical_path.pop(); } Component::Normal(name) => { - let current_entry = entry_stack.last().cloned()?; - let current_entry = current_entry.lock(); - if let FakeFsEntry::Dir { entries, .. } = &*current_entry { - let entry = entries.get(name.to_str().unwrap()).cloned()?; - if path_components.peek().is_some() || follow_symlink { - let entry = entry.lock(); - if let FakeFsEntry::Symlink { target, .. } = &*entry { - let mut target = target.clone(); - target.extend(path_components); - path = target; - continue 'outer; - } + let current_entry = *entry_stack.last()?; + if let FakeFsEntry::Dir { entries, .. } = current_entry { + let entry = entries.get(name.to_str().unwrap())?; + if (path_components.peek().is_some() || follow_symlink) + && let FakeFsEntry::Symlink { target, .. } = entry + { + let mut target = target.clone(); + target.extend(path_components); + path = target; + continue 'outer; } - entry_stack.push(entry.clone()); + entry_stack.push(entry); canonical_path = canonical_path.join(name); } else { return None; @@ -1025,19 +1086,74 @@ impl FakeFsState { } break; } - Some((entry_stack.pop()?, canonical_path)) + + if entry_stack.is_empty() { + None + } else { + Some(canonical_path) + } + } + + fn try_entry( + &mut self, + target: &Path, + follow_symlink: bool, + ) -> Option<(&mut FakeFsEntry, PathBuf)> { + let canonical_path = self.canonicalize(target, follow_symlink)?; + + let mut components = canonical_path + .components() + .skip_while(|component| matches!(component, Component::Prefix(_))); + let Some(Component::RootDir) = components.next() else { + panic!( + "the path {:?} was not canonicalized properly {:?}", + target, canonical_path + ) + }; + + let mut entry = &mut self.root; + for component in components { + match component { + Component::Normal(name) => { + if let FakeFsEntry::Dir { entries, .. } = entry { + entry = entries.get_mut(name.to_str().unwrap())?; + } else { + return None; + } + } + _ => { + panic!( + "the path {:?} was not canonicalized properly {:?}", + target, canonical_path + ) + } + } + } + + Some((entry, canonical_path)) + } + + fn entry(&mut self, target: &Path) -> Result<&mut FakeFsEntry> { + Ok(self + .try_entry(target, true) + .ok_or_else(|| { + anyhow!(io::Error::new( + io::ErrorKind::NotFound, + format!("not found: {target:?}") + )) + })? 
+ .0) } - fn write_path(&self, path: &Path, callback: Fn) -> Result + fn write_path(&mut self, path: &Path, callback: Fn) -> Result where - Fn: FnOnce(btree_map::Entry>>) -> Result, + Fn: FnOnce(btree_map::Entry) -> Result, { let path = normalize_path(path); let filename = path.file_name().context("cannot overwrite the root")?; let parent_path = path.parent().unwrap(); - let parent = self.read_path(parent_path)?; - let mut parent = parent.lock(); + let parent = self.entry(parent_path)?; let new_entry = parent .dir_entries(parent_path)? .entry(filename.to_str().unwrap().into()); @@ -1087,13 +1203,13 @@ impl FakeFs { this: this.clone(), executor: executor.clone(), state: Arc::new(Mutex::new(FakeFsState { - root: Arc::new(Mutex::new(FakeFsEntry::Dir { + root: FakeFsEntry::Dir { inode: 0, mtime: MTime(UNIX_EPOCH), len: 0, entries: Default::default(), git_repo_state: None, - })), + }, git_event_tx: tx, next_mtime: UNIX_EPOCH + Self::SYSTEMTIME_INTERVAL, next_inode: 1, @@ -1143,15 +1259,15 @@ impl FakeFs { .write_path(path, move |entry| { match entry { btree_map::Entry::Vacant(e) => { - e.insert(Arc::new(Mutex::new(FakeFsEntry::File { + e.insert(FakeFsEntry::File { inode: new_inode, mtime: new_mtime, content: Vec::new(), len: 0, git_dir_path: None, - }))); + }); } - btree_map::Entry::Occupied(mut e) => match &mut *e.get_mut().lock() { + btree_map::Entry::Occupied(mut e) => match &mut *e.get_mut() { FakeFsEntry::File { mtime, .. } => *mtime = new_mtime, FakeFsEntry::Dir { mtime, .. } => *mtime = new_mtime, FakeFsEntry::Symlink { .. } => {} @@ -1170,7 +1286,7 @@ impl FakeFs { pub async fn insert_symlink(&self, path: impl AsRef, target: PathBuf) { let mut state = self.state.lock(); let path = path.as_ref(); - let file = Arc::new(Mutex::new(FakeFsEntry::Symlink { target })); + let file = FakeFsEntry::Symlink { target }; state .write_path(path.as_ref(), move |e| match e { btree_map::Entry::Vacant(e) => { @@ -1203,13 +1319,13 @@ impl FakeFs { match entry { btree_map::Entry::Vacant(e) => { kind = Some(PathEventKind::Created); - e.insert(Arc::new(Mutex::new(FakeFsEntry::File { + e.insert(FakeFsEntry::File { inode: new_inode, mtime: new_mtime, len: new_len, content: new_content, git_dir_path: None, - }))); + }); } btree_map::Entry::Occupied(mut e) => { kind = Some(PathEventKind::Changed); @@ -1219,7 +1335,7 @@ impl FakeFs { len, content, .. 
- } = &mut *e.get_mut().lock() + } = e.get_mut() { *mtime = new_mtime; *content = new_content; @@ -1241,9 +1357,8 @@ impl FakeFs { pub fn read_file_sync(&self, path: impl AsRef) -> Result> { let path = path.as_ref(); let path = normalize_path(path); - let state = self.state.lock(); - let entry = state.read_path(&path)?; - let entry = entry.lock(); + let mut state = self.state.lock(); + let entry = state.entry(&path)?; entry.file_content(&path).cloned() } @@ -1251,9 +1366,8 @@ impl FakeFs { let path = path.as_ref(); let path = normalize_path(path); self.simulate_random_delay().await; - let state = self.state.lock(); - let entry = state.read_path(&path)?; - let entry = entry.lock(); + let mut state = self.state.lock(); + let entry = state.entry(&path)?; entry.file_content(&path).cloned() } @@ -1274,6 +1388,25 @@ impl FakeFs { self.state.lock().flush_events(count); } + pub(crate) fn entry(&self, target: &Path) -> Result { + self.state.lock().entry(target).cloned() + } + + pub(crate) fn insert_entry(&self, target: &Path, new_entry: FakeFsEntry) -> Result<()> { + let mut state = self.state.lock(); + state.write_path(target, |entry| { + match entry { + btree_map::Entry::Vacant(vacant_entry) => { + vacant_entry.insert(new_entry); + } + btree_map::Entry::Occupied(mut occupied_entry) => { + occupied_entry.insert(new_entry); + } + } + Ok(()) + }) + } + #[must_use] pub fn insert_tree<'a>( &'a self, @@ -1343,20 +1476,19 @@ impl FakeFs { F: FnOnce(&mut FakeGitRepositoryState, &Path, &Path) -> T, { let mut state = self.state.lock(); - let entry = state.read_path(dot_git).context("open .git")?; - let mut entry = entry.lock(); + let git_event_tx = state.git_event_tx.clone(); + let entry = state.entry(dot_git).context("open .git")?; - if let FakeFsEntry::Dir { git_repo_state, .. } = &mut *entry { + if let FakeFsEntry::Dir { git_repo_state, .. } = entry { let repo_state = git_repo_state.get_or_insert_with(|| { log::debug!("insert git state for {dot_git:?}"); - Arc::new(Mutex::new(FakeGitRepositoryState::new( - state.git_event_tx.clone(), - ))) + Arc::new(Mutex::new(FakeGitRepositoryState::new(git_event_tx))) }); let mut repo_state = repo_state.lock(); let result = f(&mut repo_state, dot_git, dot_git); + drop(repo_state); if emit_git_event { state.emit_event([(dot_git, None)]); } @@ -1380,21 +1512,20 @@ impl FakeFs { } } .clone(); - drop(entry); - let Some((git_dir_entry, canonical_path)) = state.try_read_path(&path, true) else { + let Some((git_dir_entry, canonical_path)) = state.try_entry(&path, true) else { anyhow::bail!("pointed-to git dir {path:?} not found") }; let FakeFsEntry::Dir { git_repo_state, entries, .. - } = &mut *git_dir_entry.lock() + } = git_dir_entry else { anyhow::bail!("gitfile points to a non-directory") }; let common_dir = if let Some(child) = entries.get("commondir") { Path::new( - std::str::from_utf8(child.lock().file_content("commondir".as_ref())?) + std::str::from_utf8(child.file_content("commondir".as_ref())?) 
.context("commondir content")?, ) .to_owned() @@ -1402,15 +1533,14 @@ impl FakeFs { canonical_path.clone() }; let repo_state = git_repo_state.get_or_insert_with(|| { - Arc::new(Mutex::new(FakeGitRepositoryState::new( - state.git_event_tx.clone(), - ))) + Arc::new(Mutex::new(FakeGitRepositoryState::new(git_event_tx))) }); let mut repo_state = repo_state.lock(); let result = f(&mut repo_state, &canonical_path, &common_dir); if emit_git_event { + drop(repo_state); state.emit_event([(canonical_path, None)]); } @@ -1438,10 +1568,10 @@ impl FakeFs { pub fn insert_branches(&self, dot_git: &Path, branches: &[&str]) { self.with_git_state(dot_git, true, |state| { - if let Some(first) = branches.first() { - if state.current_branch_name.is_none() { - state.current_branch_name = Some(first.to_string()) - } + if let Some(first) = branches.first() + && state.current_branch_name.is_none() + { + state.current_branch_name = Some(first.to_string()) } state .branches @@ -1549,7 +1679,7 @@ impl FakeFs { /// by mutating the head, index, and unmerged state. pub fn set_status_for_repo(&self, dot_git: &Path, statuses: &[(&Path, FileStatus)]) { let workdir_path = dot_git.parent().unwrap(); - let workdir_contents = self.files_with_contents(&workdir_path); + let workdir_contents = self.files_with_contents(workdir_path); self.with_git_state(dot_git, true, |state| { state.index_contents.clear(); state.head_contents.clear(); @@ -1637,14 +1767,12 @@ impl FakeFs { pub fn paths(&self, include_dot_git: bool) -> Vec { let mut result = Vec::new(); let mut queue = collections::VecDeque::new(); - queue.push_back(( - PathBuf::from(util::path!("/")), - self.state.lock().root.clone(), - )); + let state = &*self.state.lock(); + queue.push_back((PathBuf::from(util::path!("/")), &state.root)); while let Some((path, entry)) = queue.pop_front() { - if let FakeFsEntry::Dir { entries, .. } = &*entry.lock() { + if let FakeFsEntry::Dir { entries, .. } = entry { for (name, entry) in entries { - queue.push_back((path.join(name), entry.clone())); + queue.push_back((path.join(name), entry)); } } if include_dot_git @@ -1661,14 +1789,12 @@ impl FakeFs { pub fn directories(&self, include_dot_git: bool) -> Vec { let mut result = Vec::new(); let mut queue = collections::VecDeque::new(); - queue.push_back(( - PathBuf::from(util::path!("/")), - self.state.lock().root.clone(), - )); + let state = &*self.state.lock(); + queue.push_back((PathBuf::from(util::path!("/")), &state.root)); while let Some((path, entry)) = queue.pop_front() { - if let FakeFsEntry::Dir { entries, .. } = &*entry.lock() { + if let FakeFsEntry::Dir { entries, .. } = entry { for (name, entry) in entries { - queue.push_back((path.join(name), entry.clone())); + queue.push_back((path.join(name), entry)); } if include_dot_git || !path @@ -1685,17 +1811,14 @@ impl FakeFs { pub fn files(&self) -> Vec { let mut result = Vec::new(); let mut queue = collections::VecDeque::new(); - queue.push_back(( - PathBuf::from(util::path!("/")), - self.state.lock().root.clone(), - )); + let state = &*self.state.lock(); + queue.push_back((PathBuf::from(util::path!("/")), &state.root)); while let Some((path, entry)) = queue.pop_front() { - let e = entry.lock(); - match &*e { + match entry { FakeFsEntry::File { .. } => result.push(path), FakeFsEntry::Dir { entries, .. } => { for (name, entry) in entries { - queue.push_back((path.join(name), entry.clone())); + queue.push_back((path.join(name), entry)); } } FakeFsEntry::Symlink { .. 
} => {} @@ -1707,13 +1830,10 @@ impl FakeFs { pub fn files_with_contents(&self, prefix: &Path) -> Vec<(PathBuf, Vec)> { let mut result = Vec::new(); let mut queue = collections::VecDeque::new(); - queue.push_back(( - PathBuf::from(util::path!("/")), - self.state.lock().root.clone(), - )); + let state = &*self.state.lock(); + queue.push_back((PathBuf::from(util::path!("/")), &state.root)); while let Some((path, entry)) = queue.pop_front() { - let e = entry.lock(); - match &*e { + match entry { FakeFsEntry::File { content, .. } => { if path.starts_with(prefix) { result.push((path, content.clone())); @@ -1721,7 +1841,7 @@ impl FakeFs { } FakeFsEntry::Dir { entries, .. } => { for (name, entry) in entries { - queue.push_back((path.join(name), entry.clone())); + queue.push_back((path.join(name), entry)); } } FakeFsEntry::Symlink { .. } => {} @@ -1787,10 +1907,7 @@ impl FakeFsEntry { } } - fn dir_entries( - &mut self, - path: &Path, - ) -> Result<&mut BTreeMap>>> { + fn dir_entries(&mut self, path: &Path) -> Result<&mut BTreeMap> { if let Self::Dir { entries, .. } = self { Ok(entries) } else { @@ -1837,13 +1954,13 @@ struct FakeHandle { impl FileHandle for FakeHandle { fn current_path(&self, fs: &Arc) -> Result { let fs = fs.as_fake(); - let state = fs.state.lock(); - let Some(target) = state.moves.get(&self.inode) else { + let mut state = fs.state.lock(); + let Some(target) = state.moves.get(&self.inode).cloned() else { anyhow::bail!("fake fd not moved") }; - if state.try_read_path(&target, false).is_some() { - return Ok(target.clone()); + if state.try_entry(&target, false).is_some() { + return Ok(target); } anyhow::bail!("fake fd target not found") } @@ -1870,13 +1987,13 @@ impl Fs for FakeFs { state.write_path(&cur_path, |entry| { entry.or_insert_with(|| { created_dirs.push((cur_path.clone(), Some(PathEventKind::Created))); - Arc::new(Mutex::new(FakeFsEntry::Dir { + FakeFsEntry::Dir { inode, mtime, len: 0, entries: Default::default(), git_repo_state: None, - })) + } }); Ok(()) })? @@ -1891,13 +2008,13 @@ impl Fs for FakeFs { let mut state = self.state.lock(); let inode = state.get_and_increment_inode(); let mtime = state.get_and_increment_mtime(); - let file = Arc::new(Mutex::new(FakeFsEntry::File { + let file = FakeFsEntry::File { inode, mtime, len: 0, content: Vec::new(), git_dir_path: None, - })); + }; let mut kind = Some(PathEventKind::Created); state.write_path(path, |entry| { match entry { @@ -1921,7 +2038,7 @@ impl Fs for FakeFs { async fn create_symlink(&self, path: &Path, target: PathBuf) -> Result<()> { let mut state = self.state.lock(); - let file = Arc::new(Mutex::new(FakeFsEntry::Symlink { target })); + let file = FakeFsEntry::Symlink { target }; state .write_path(path.as_ref(), move |e| match e { btree_map::Entry::Vacant(e) => { @@ -1984,7 +2101,7 @@ impl Fs for FakeFs { } })?; - let inode = match *moved_entry.lock() { + let inode = match moved_entry { FakeFsEntry::File { inode, .. } => inode, FakeFsEntry::Dir { inode, .. 
} => inode, _ => 0, @@ -2033,8 +2150,8 @@ impl Fs for FakeFs { let mut state = self.state.lock(); let mtime = state.get_and_increment_mtime(); let inode = state.get_and_increment_inode(); - let source_entry = state.read_path(&source)?; - let content = source_entry.lock().file_content(&source)?.clone(); + let source_entry = state.entry(&source)?; + let content = source_entry.file_content(&source)?.clone(); let mut kind = Some(PathEventKind::Created); state.write_path(&target, |e| match e { btree_map::Entry::Occupied(e) => { @@ -2048,13 +2165,13 @@ impl Fs for FakeFs { } } btree_map::Entry::Vacant(e) => Ok(Some( - e.insert(Arc::new(Mutex::new(FakeFsEntry::File { + e.insert(FakeFsEntry::File { inode, mtime, len: content.len() as u64, content, git_dir_path: None, - }))) + }) .clone(), )), })?; @@ -2070,8 +2187,7 @@ impl Fs for FakeFs { let base_name = path.file_name().context("cannot remove the root")?; let mut state = self.state.lock(); - let parent_entry = state.read_path(parent_path)?; - let mut parent_entry = parent_entry.lock(); + let parent_entry = state.entry(parent_path)?; let entry = parent_entry .dir_entries(parent_path)? .entry(base_name.to_str().unwrap().into()); @@ -2082,15 +2198,14 @@ impl Fs for FakeFs { anyhow::bail!("{path:?} does not exist"); } } - btree_map::Entry::Occupied(e) => { + btree_map::Entry::Occupied(mut entry) => { { - let mut entry = e.get().lock(); - let children = entry.dir_entries(&path)?; + let children = entry.get_mut().dir_entries(&path)?; if !options.recursive && !children.is_empty() { anyhow::bail!("{path:?} is not empty"); } } - e.remove(); + entry.remove(); } } state.emit_event([(path, Some(PathEventKind::Removed))]); @@ -2104,8 +2219,7 @@ impl Fs for FakeFs { let parent_path = path.parent().context("cannot remove the root")?; let base_name = path.file_name().unwrap(); let mut state = self.state.lock(); - let parent_entry = state.read_path(parent_path)?; - let mut parent_entry = parent_entry.lock(); + let parent_entry = state.entry(parent_path)?; let entry = parent_entry .dir_entries(parent_path)? .entry(base_name.to_str().unwrap().into()); @@ -2115,9 +2229,9 @@ impl Fs for FakeFs { anyhow::bail!("{path:?} does not exist"); } } - btree_map::Entry::Occupied(e) => { - e.get().lock().file_content(&path)?; - e.remove(); + btree_map::Entry::Occupied(mut entry) => { + entry.get_mut().file_content(&path)?; + entry.remove(); } } state.emit_event([(path, Some(PathEventKind::Removed))]); @@ -2131,12 +2245,10 @@ impl Fs for FakeFs { async fn open_handle(&self, path: &Path) -> Result> { self.simulate_random_delay().await; - let state = self.state.lock(); - let entry = state.read_path(&path)?; - let entry = entry.lock(); - let inode = match *entry { - FakeFsEntry::File { inode, .. } => inode, - FakeFsEntry::Dir { inode, .. } => inode, + let mut state = self.state.lock(); + let inode = match state.entry(path)? { + FakeFsEntry::File { inode, .. } => *inode, + FakeFsEntry::Dir { inode, .. } => *inode, _ => unreachable!(), }; Ok(Arc::new(FakeHandle { inode })) @@ -2144,7 +2256,7 @@ impl Fs for FakeFs { async fn load(&self, path: &Path) -> Result { let content = self.load_internal(path).await?; - Ok(String::from_utf8(content.clone())?) + Ok(String::from_utf8(content)?) 
} async fn load_bytes(&self, path: &Path) -> Result> { @@ -2154,6 +2266,9 @@ impl Fs for FakeFs { async fn atomic_write(&self, path: PathBuf, data: String) -> Result<()> { self.simulate_random_delay().await; let path = normalize_path(path.as_path()); + if let Some(path) = path.parent() { + self.create_dir(path).await?; + } self.write_file_internal(path, data.into_bytes(), true)?; Ok(()) } @@ -2183,8 +2298,8 @@ impl Fs for FakeFs { let path = normalize_path(path); self.simulate_random_delay().await; let state = self.state.lock(); - let (_, canonical_path) = state - .try_read_path(&path, true) + let canonical_path = state + .canonicalize(&path, true) .with_context(|| format!("path does not exist: {path:?}"))?; Ok(canonical_path) } @@ -2192,9 +2307,9 @@ impl Fs for FakeFs { async fn is_file(&self, path: &Path) -> bool { let path = normalize_path(path); self.simulate_random_delay().await; - let state = self.state.lock(); - if let Some((entry, _)) = state.try_read_path(&path, true) { - entry.lock().is_file() + let mut state = self.state.lock(); + if let Some((entry, _)) = state.try_entry(&path, true) { + entry.is_file() } else { false } @@ -2211,17 +2326,16 @@ impl Fs for FakeFs { let path = normalize_path(path); let mut state = self.state.lock(); state.metadata_call_count += 1; - if let Some((mut entry, _)) = state.try_read_path(&path, false) { - let is_symlink = entry.lock().is_symlink(); + if let Some((mut entry, _)) = state.try_entry(&path, false) { + let is_symlink = entry.is_symlink(); if is_symlink { - if let Some(e) = state.try_read_path(&path, true).map(|e| e.0) { + if let Some(e) = state.try_entry(&path, true).map(|e| e.0) { entry = e; } else { return Ok(None); } } - let entry = entry.lock(); Ok(Some(match &*entry { FakeFsEntry::File { inode, mtime, len, .. 
@@ -2253,12 +2367,11 @@ impl Fs for FakeFs { async fn read_link(&self, path: &Path) -> Result { self.simulate_random_delay().await; let path = normalize_path(path); - let state = self.state.lock(); + let mut state = self.state.lock(); let (entry, _) = state - .try_read_path(&path, false) + .try_entry(&path, false) .with_context(|| format!("path does not exist: {path:?}"))?; - let entry = entry.lock(); - if let FakeFsEntry::Symlink { target } = &*entry { + if let FakeFsEntry::Symlink { target } = entry { Ok(target.clone()) } else { anyhow::bail!("not a symlink: {path:?}") @@ -2273,8 +2386,7 @@ impl Fs for FakeFs { let path = normalize_path(path); let mut state = self.state.lock(); state.read_dir_call_count += 1; - let entry = state.read_path(&path)?; - let mut entry = entry.lock(); + let entry = state.entry(&path)?; let children = entry.dir_entries(&path)?; let paths = children .keys() @@ -2300,19 +2412,18 @@ impl Fs for FakeFs { tx, original_path: path.to_owned(), fs_state: self.state.clone(), - prefixes: Mutex::new(vec![path.to_owned()]), + prefixes: Mutex::new(vec![path]), }); ( Box::pin(futures::StreamExt::filter(rx, { let watcher = watcher.clone(); move |events| { let result = events.iter().any(|evt_path| { - let result = watcher + watcher .prefixes .lock() .iter() - .any(|prefix| evt_path.path.starts_with(prefix)); - result + .any(|prefix| evt_path.path.starts_with(prefix)) }); let executor = executor.clone(); async move { @@ -2338,6 +2449,7 @@ impl Fs for FakeFs { dot_git_path: abs_dot_git.to_path_buf(), repository_dir_path: repository_dir_path.to_owned(), common_dir_path: common_dir_path.to_owned(), + checkpoints: Arc::default(), }) as _ }, ) @@ -2352,6 +2464,10 @@ impl Fs for FakeFs { smol::block_on(self.create_dir(&abs_work_directory_path.join(".git"))) } + async fn git_clone(&self, _repo_url: &str, _abs_work_directory: &Path) -> Result<()> { + anyhow::bail!("Git clone is not supported in fake Fs") + } + fn is_fake(&self) -> bool { true } diff --git a/crates/fs/src/fs_watcher.rs b/crates/fs/src/fs_watcher.rs index 9fdf2ad0b1c84723e2a4218dbca5f06ce65918b4..6ad03ba6dfa2003b1642cbb542e3a9cf0bf13ec9 100644 --- a/crates/fs/src/fs_watcher.rs +++ b/crates/fs/src/fs_watcher.rs @@ -1,6 +1,9 @@ use notify::EventKind; use parking_lot::Mutex; -use std::sync::{Arc, OnceLock}; +use std::{ + collections::HashMap, + sync::{Arc, OnceLock}, +}; use util::{ResultExt, paths::SanitizedPath}; use crate::{PathEvent, PathEventKind, Watcher}; @@ -8,6 +11,7 @@ use crate::{PathEvent, PathEventKind, Watcher}; pub struct FsWatcher { tx: smol::channel::Sender<()>, pending_path_events: Arc>>, + registrations: Mutex, WatcherRegistrationId>>, } impl FsWatcher { @@ -18,10 +22,24 @@ impl FsWatcher { Self { tx, pending_path_events, + registrations: Default::default(), } } } +impl Drop for FsWatcher { + fn drop(&mut self) { + let mut registrations = self.registrations.lock(); + let registrations = registrations.drain(); + + let _ = global(|g| { + for (_, registration) in registrations { + g.remove(registration); + } + }); + } +} + impl Watcher for FsWatcher { fn add(&self, path: &std::path::Path) -> anyhow::Result<()> { let root_path = SanitizedPath::from(path); @@ -29,75 +47,143 @@ impl Watcher for FsWatcher { let tx = self.tx.clone(); let pending_paths = self.pending_path_events.clone(); - use notify::Watcher; + let path: Arc = path.into(); + + if self.registrations.lock().contains_key(&path) { + return Ok(()); + } - global({ + let registration_id = global({ + let path = path.clone(); |g| { - g.add(move |event: 
&notify::Event| { - let kind = match event.kind { - EventKind::Create(_) => Some(PathEventKind::Created), - EventKind::Modify(_) => Some(PathEventKind::Changed), - EventKind::Remove(_) => Some(PathEventKind::Removed), - _ => None, - }; - let mut path_events = event - .paths - .iter() - .filter_map(|event_path| { - let event_path = SanitizedPath::from(event_path); - event_path.starts_with(&root_path).then(|| PathEvent { - path: event_path.as_path().to_path_buf(), - kind, + g.add( + path, + notify::RecursiveMode::NonRecursive, + move |event: &notify::Event| { + let kind = match event.kind { + EventKind::Create(_) => Some(PathEventKind::Created), + EventKind::Modify(_) => Some(PathEventKind::Changed), + EventKind::Remove(_) => Some(PathEventKind::Removed), + _ => None, + }; + let mut path_events = event + .paths + .iter() + .filter_map(|event_path| { + let event_path = SanitizedPath::from(event_path); + event_path.starts_with(&root_path).then(|| PathEvent { + path: event_path.as_path().to_path_buf(), + kind, + }) }) - }) - .collect::>(); - - if !path_events.is_empty() { - path_events.sort(); - let mut pending_paths = pending_paths.lock(); - if pending_paths.is_empty() { - tx.try_send(()).ok(); + .collect::>(); + + if !path_events.is_empty() { + path_events.sort(); + let mut pending_paths = pending_paths.lock(); + if pending_paths.is_empty() { + tx.try_send(()).ok(); + } + util::extend_sorted( + &mut *pending_paths, + path_events, + usize::MAX, + |a, b| a.path.cmp(&b.path), + ); } - util::extend_sorted( - &mut *pending_paths, - path_events, - usize::MAX, - |a, b| a.path.cmp(&b.path), - ); - } - }) + }, + ) } - })?; - - global(|g| { - g.watcher - .lock() - .watch(path, notify::RecursiveMode::NonRecursive) })??; + self.registrations.lock().insert(path, registration_id); + Ok(()) } fn remove(&self, path: &std::path::Path) -> anyhow::Result<()> { - use notify::Watcher; - Ok(global(|w| w.watcher.lock().unwatch(path))??) + let Some(registration) = self.registrations.lock().remove(path) else { + return Ok(()); + }; + + global(|w| w.remove(registration)) } } +#[derive(Default, Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct WatcherRegistrationId(u32); + +struct WatcherRegistrationState { + callback: Arc, + path: Arc, +} + +struct WatcherState { + watchers: HashMap, + path_registrations: HashMap, u32>, + last_registration: WatcherRegistrationId, +} + pub struct GlobalWatcher { + state: Mutex, + + // DANGER: never keep the state lock while holding the watcher lock // two mutexes because calling watcher.add triggers an watcher.event, which needs watchers.
#[cfg(target_os = "linux")] - pub(super) watcher: Mutex, + watcher: Mutex, #[cfg(target_os = "freebsd")] - pub(super) watcher: Mutex, + watcher: Mutex, #[cfg(target_os = "windows")] - pub(super) watcher: Mutex, - pub(super) watchers: Mutex>>, + watcher: Mutex, } impl GlobalWatcher { - pub(super) fn add(&self, cb: impl Fn(&notify::Event) + Send + Sync + 'static) { - self.watchers.lock().push(Box::new(cb)) + #[must_use] + fn add( + &self, + path: Arc, + mode: notify::RecursiveMode, + cb: impl Fn(&notify::Event) + Send + Sync + 'static, + ) -> anyhow::Result { + use notify::Watcher; + + self.watcher.lock().watch(&path, mode)?; + + let mut state = self.state.lock(); + + let id = state.last_registration; + state.last_registration = WatcherRegistrationId(id.0 + 1); + + let registration_state = WatcherRegistrationState { + callback: Arc::new(cb), + path: path.clone(), + }; + state.watchers.insert(id, registration_state); + *state.path_registrations.entry(path).or_insert(0) += 1; + + Ok(id) + } + + pub fn remove(&self, id: WatcherRegistrationId) { + use notify::Watcher; + let mut state = self.state.lock(); + let Some(registration_state) = state.watchers.remove(&id) else { + return; + }; + + let Some(count) = state.path_registrations.get_mut(&registration_state.path) else { + return; + }; + *count -= 1; + if *count == 0 { + state.path_registrations.remove(&registration_state.path); + + drop(state); + self.watcher + .lock() + .unwatch(&registration_state.path) + .log_err(); + } } } @@ -114,8 +200,16 @@ fn handle_event(event: Result) { return; }; global::<()>(move |watcher| { - for f in watcher.watchers.lock().iter() { - f(&event) + let callbacks = { + let state = watcher.state.lock(); + state + .watchers + .values() + .map(|r| r.callback.clone()) + .collect::>() + }; + for callback in callbacks { + callback(&event); } }) .log_err(); @@ -124,8 +218,12 @@ fn handle_event(event: Result) { pub fn global(f: impl FnOnce(&GlobalWatcher) -> T) -> anyhow::Result { let result = FS_WATCHER_INSTANCE.get_or_init(|| { notify::recommended_watcher(handle_event).map(|file_watcher| GlobalWatcher { + state: Mutex::new(WatcherState { + watchers: Default::default(), + path_registrations: Default::default(), + last_registration: Default::default(), + }), watcher: Mutex::new(file_watcher), - watchers: Default::default(), }) }); match result { diff --git a/crates/fs/src/mac_watcher.rs b/crates/fs/src/mac_watcher.rs index aa75ad31d9beadada32b62ed4d21a612631d31c3..7bd176639f1dccef2da4c4ae8dcb317d0be602cb 100644 --- a/crates/fs/src/mac_watcher.rs +++ b/crates/fs/src/mac_watcher.rs @@ -41,10 +41,9 @@ impl Watcher for MacWatcher { if let Some((watched_path, _)) = handles .range::((Bound::Unbounded, Bound::Included(path))) .next_back() + && path.starts_with(watched_path) { - if path.starts_with(watched_path) { - return Ok(()); - } + return Ok(()); } let (stream, handle) = EventStream::new(&[path], self.latency); diff --git a/crates/fsevent/src/fsevent.rs b/crates/fsevent/src/fsevent.rs index 81ca0a4114253fc38b5d120d1c37dfc9233f7fd1..c97ab5f35d1b1e8463e895da7a309dc7ef3be998 100644 --- a/crates/fsevent/src/fsevent.rs +++ b/crates/fsevent/src/fsevent.rs @@ -178,40 +178,39 @@ impl EventStream { flags.contains(StreamFlags::USER_DROPPED) || flags.contains(StreamFlags::KERNEL_DROPPED) }) + && let Some(last_valid_event_id) = state.last_valid_event_id.take() { - if let Some(last_valid_event_id) = state.last_valid_event_id.take() { - fs::FSEventStreamStop(state.stream); - fs::FSEventStreamInvalidate(state.stream); -
fs::FSEventStreamRelease(state.stream); - - let stream_context = fs::FSEventStreamContext { - version: 0, - info, - retain: None, - release: None, - copy_description: None, - }; - let stream = fs::FSEventStreamCreate( - cf::kCFAllocatorDefault, - Self::trampoline, - &stream_context, - state.paths, - last_valid_event_id, - state.latency.as_secs_f64(), - fs::kFSEventStreamCreateFlagFileEvents - | fs::kFSEventStreamCreateFlagNoDefer - | fs::kFSEventStreamCreateFlagWatchRoot, - ); - - state.stream = stream; - fs::FSEventStreamScheduleWithRunLoop( - state.stream, - cf::CFRunLoopGetCurrent(), - cf::kCFRunLoopDefaultMode, - ); - fs::FSEventStreamStart(state.stream); - stream_restarted = true; - } + fs::FSEventStreamStop(state.stream); + fs::FSEventStreamInvalidate(state.stream); + fs::FSEventStreamRelease(state.stream); + + let stream_context = fs::FSEventStreamContext { + version: 0, + info, + retain: None, + release: None, + copy_description: None, + }; + let stream = fs::FSEventStreamCreate( + cf::kCFAllocatorDefault, + Self::trampoline, + &stream_context, + state.paths, + last_valid_event_id, + state.latency.as_secs_f64(), + fs::kFSEventStreamCreateFlagFileEvents + | fs::kFSEventStreamCreateFlagNoDefer + | fs::kFSEventStreamCreateFlagWatchRoot, + ); + + state.stream = stream; + fs::FSEventStreamScheduleWithRunLoop( + state.stream, + cf::CFRunLoopGetCurrent(), + cf::kCFRunLoopDefaultMode, + ); + fs::FSEventStreamStart(state.stream); + stream_restarted = true; } if !stream_restarted { diff --git a/crates/git/Cargo.toml b/crates/git/Cargo.toml index ab2210094da92cd665ee2483642baaacee66d3a4..74656f1d4c86936b630cb8c1b05030f8d46ccb5c 100644 --- a/crates/git/Cargo.toml +++ b/crates/git/Cargo.toml @@ -12,7 +12,7 @@ workspace = true path = "src/git.rs" [features] -test-support = [] +test-support = ["rand"] [dependencies] anyhow.workspace = true @@ -26,6 +26,7 @@ http_client.workspace = true log.workspace = true parking_lot.workspace = true regex.workspace = true +rand = { workspace = true, optional = true } rope.workspace = true schemars.workspace = true serde.workspace = true @@ -47,3 +48,4 @@ text = { workspace = true, features = ["test-support"] } unindent.workspace = true gpui = { workspace = true, features = ["test-support"] } tempfile.workspace = true +rand.workspace = true diff --git a/crates/git/src/blame.rs b/crates/git/src/blame.rs index 2128fa55c370c600105cd05811b0c73f1ea26144..24b2c44218120b1237fb42e04edc9b6784356c57 100644 --- a/crates/git/src/blame.rs +++ b/crates/git/src/blame.rs @@ -73,6 +73,7 @@ async fn run_git_blame( .current_dir(working_directory) .arg("blame") .arg("--incremental") + .arg("-w") .arg("--contents") .arg("-") .arg(path.as_os_str()) @@ -288,14 +289,12 @@ fn parse_git_blame(output: &str) -> Result> { } }; - if done { - if let Some(entry) = current_entry.take() { - index.insert(entry.sha, entries.len()); + if done && let Some(entry) = current_entry.take() { + index.insert(entry.sha, entries.len()); - // We only want annotations that have a commit. - if !entry.sha.is_zero() { - entries.push(entry); - } + // We only want annotations that have a commit. + if !entry.sha.is_zero() { + entries.push(entry); } } } diff --git a/crates/git/src/git.rs b/crates/git/src/git.rs index 553361e673a0de1842326f14dd2be976a63156eb..e84014129cf5a423279b84bed897a4fac2528e02 100644 --- a/crates/git/src/git.rs +++ b/crates/git/src/git.rs @@ -93,6 +93,8 @@ actions!( Init, /// Opens all modified files in the editor. OpenModifiedFiles, + /// Clones a repository. 
+ Clone, ] ); @@ -117,6 +119,13 @@ impl Oid { Ok(Self(oid)) } + #[cfg(any(test, feature = "test-support"))] + pub fn random(rng: &mut impl rand::Rng) -> Self { + let mut bytes = [0; 20]; + rng.fill(&mut bytes); + Self::from_bytes(&bytes).unwrap() + } + pub fn as_bytes(&self) -> &[u8] { self.0.as_bytes() } diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index dc7ab0af654290a056b120804bb1cf7323149a4a..fd12dafa98575b5d8b0190d6ed4e894ac61e8165 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -6,7 +6,7 @@ use collections::HashMap; use futures::future::BoxFuture; use futures::{AsyncWriteExt, FutureExt as _, select_biased}; use git2::BranchType; -use gpui::{AppContext as _, AsyncApp, BackgroundExecutor, SharedString}; +use gpui::{AppContext as _, AsyncApp, BackgroundExecutor, SharedString, Task}; use parking_lot::Mutex; use rope::Rope; use schemars::JsonSchema; @@ -269,10 +269,8 @@ impl GitExcludeOverride { pub async fn restore_original(&mut self) -> Result<()> { if let Some(ref original) = self.original_excludes { smol::fs::write(&self.git_exclude_path, original).await?; - } else { - if self.git_exclude_path.exists() { - smol::fs::remove_file(&self.git_exclude_path).await?; - } + } else if self.git_exclude_path.exists() { + smol::fs::remove_file(&self.git_exclude_path).await?; } self.added_excludes = None; @@ -338,7 +336,7 @@ pub trait GitRepository: Send + Sync { fn merge_message(&self) -> BoxFuture<'_, Option>; - fn status(&self, path_prefixes: &[RepoPath]) -> BoxFuture<'_, Result>; + fn status(&self, path_prefixes: &[RepoPath]) -> Task>; fn branches(&self) -> BoxFuture<'_, Result>>; @@ -399,9 +397,9 @@ pub trait GitRepository: Send + Sync { &self, paths: Vec, env: Arc>, - ) -> BoxFuture>; + ) -> BoxFuture<'_, Result<()>>; - fn stash_pop(&self, env: Arc>) -> BoxFuture>; + fn stash_pop(&self, env: Arc>) -> BoxFuture<'_, Result<()>>; fn push( &self, @@ -858,7 +856,7 @@ impl GitRepository for RealGitRepository { let output = new_smol_command(&git_binary_path) .current_dir(&working_directory) .envs(env.iter()) - .args(["update-index", "--add", "--cacheinfo", "100644", &sha]) + .args(["update-index", "--add", "--cacheinfo", "100644", sha]) .arg(path.to_unix_style()) .output() .await?; @@ -918,7 +916,7 @@ impl GitRepository for RealGitRepository { .context("no stdin for git cat-file subprocess")?; let mut stdin = BufWriter::new(stdin); for rev in &revs { - write!(&mut stdin, "{rev}\n")?; + writeln!(&mut stdin, "{rev}")?; } stdin.flush()?; drop(stdin); @@ -953,25 +951,27 @@ impl GitRepository for RealGitRepository { .boxed() } - fn status(&self, path_prefixes: &[RepoPath]) -> BoxFuture<'_, Result> { + fn status(&self, path_prefixes: &[RepoPath]) -> Task> { let git_binary_path = self.git_binary_path.clone(); - let working_directory = self.working_directory(); - let path_prefixes = path_prefixes.to_owned(); - self.executor - .spawn(async move { - let output = new_std_command(&git_binary_path) - .current_dir(working_directory?) 
- .args(git_status_args(&path_prefixes)) - .output()?; - if output.status.success() { - let stdout = String::from_utf8_lossy(&output.stdout); - stdout.parse() - } else { - let stderr = String::from_utf8_lossy(&output.stderr); - anyhow::bail!("git status failed: {stderr}"); - } - }) - .boxed() + let working_directory = match self.working_directory() { + Ok(working_directory) => working_directory, + Err(e) => return Task::ready(Err(e)), + }; + let args = git_status_args(path_prefixes); + log::debug!("Checking for git status in {path_prefixes:?}"); + self.executor.spawn(async move { + let output = new_std_command(&git_binary_path) + .current_dir(working_directory) + .args(args) + .output()?; + if output.status.success() { + let stdout = String::from_utf8_lossy(&output.stdout); + stdout.parse() + } else { + let stderr = String::from_utf8_lossy(&output.stderr); + anyhow::bail!("git status failed: {stderr}"); + } + }) } fn branches(&self) -> BoxFuture<'_, Result>> { @@ -1054,7 +1054,7 @@ impl GitRepository for RealGitRepository { let (_, branch_name) = name.split_once("/").context("Unexpected branch format")?; let revision = revision.get(); let branch_commit = revision.peel_to_commit()?; - let mut branch = repo.branch(&branch_name, &branch_commit, false)?; + let mut branch = repo.branch(branch_name, &branch_commit, false)?; branch.set_upstream(Some(&name))?; branch } else { @@ -1203,7 +1203,7 @@ impl GitRepository for RealGitRepository { &self, paths: Vec, env: Arc>, - ) -> BoxFuture> { + ) -> BoxFuture<'_, Result<()>> { let working_directory = self.working_directory(); self.executor .spawn(async move { @@ -1227,7 +1227,7 @@ impl GitRepository for RealGitRepository { .boxed() } - fn stash_pop(&self, env: Arc>) -> BoxFuture> { + fn stash_pop(&self, env: Arc>) -> BoxFuture<'_, Result<()>> { let working_directory = self.working_directory(); self.executor .spawn(async move { @@ -1445,12 +1445,11 @@ impl GitRepository for RealGitRepository { let mut remote_branches = vec![]; let mut add_if_matching = async |remote_head: &str| { - if let Ok(merge_base) = git_cmd(&["merge-base", &head, remote_head]).await { - if merge_base.trim() == head { - if let Some(s) = remote_head.strip_prefix("refs/remotes/") { - remote_branches.push(s.to_owned().into()); - } - } + if let Ok(merge_base) = git_cmd(&["merge-base", &head, remote_head]).await + && merge_base.trim() == head + && let Some(s) = remote_head.strip_prefix("refs/remotes/") + { + remote_branches.push(s.to_owned().into()); } }; @@ -1572,10 +1571,9 @@ impl GitRepository for RealGitRepository { Err(error) => { if let Some(GitBinaryCommandError { status, .. 
}) = error.downcast_ref::() + && status.code() == Some(1) { - if status.code() == Some(1) { - return Ok(false); - } + return Ok(false); } Err(error) @@ -2030,7 +2028,7 @@ fn parse_branch_input(input: &str) -> Result> { branches.push(Branch { is_head: is_current_branch, - ref_name: ref_name, + ref_name, most_recent_commit: Some(CommitSummary { sha: head_sha, subject, @@ -2052,7 +2050,7 @@ fn parse_branch_input(input: &str) -> Result> { } fn parse_upstream_track(upstream_track: &str) -> Result { - if upstream_track == "" { + if upstream_track.is_empty() { return Ok(UpstreamTracking::Tracked(UpstreamTrackingStatus { ahead: 0, behind: 0, @@ -2347,7 +2345,7 @@ mod tests { #[allow(clippy::octal_escapes)] let input = "*\0060964da10574cd9bf06463a53bf6e0769c5c45e\0\0refs/heads/zed-patches\0refs/remotes/origin/zed-patches\0\01733187470\0generated protobuf\n"; assert_eq!( - parse_branch_input(&input).unwrap(), + parse_branch_input(input).unwrap(), vec![Branch { is_head: true, ref_name: "refs/heads/zed-patches".into(), diff --git a/crates/git/src/status.rs b/crates/git/src/status.rs index 6158b5179838c2b3bd36fb91f2aa9e2286c52ca1..71ca14c5b2c4b82ae7dc21e832a2a07c55de8fc3 100644 --- a/crates/git/src/status.rs +++ b/crates/git/src/status.rs @@ -153,17 +153,11 @@ impl FileStatus { } pub fn is_conflicted(self) -> bool { - match self { - FileStatus::Unmerged { .. } => true, - _ => false, - } + matches!(self, FileStatus::Unmerged { .. }) } pub fn is_ignored(self) -> bool { - match self { - FileStatus::Ignored => true, - _ => false, - } + matches!(self, FileStatus::Ignored) } pub fn has_changes(&self) -> bool { @@ -176,40 +170,31 @@ impl FileStatus { pub fn is_modified(self) -> bool { match self { - FileStatus::Tracked(tracked) => match (tracked.index_status, tracked.worktree_status) { - (StatusCode::Modified, _) | (_, StatusCode::Modified) => true, - _ => false, - }, + FileStatus::Tracked(tracked) => matches!( + (tracked.index_status, tracked.worktree_status), + (StatusCode::Modified, _) | (_, StatusCode::Modified) + ), _ => false, } } pub fn is_created(self) -> bool { match self { - FileStatus::Tracked(tracked) => match (tracked.index_status, tracked.worktree_status) { - (StatusCode::Added, _) | (_, StatusCode::Added) => true, - _ => false, - }, + FileStatus::Tracked(tracked) => matches!( + (tracked.index_status, tracked.worktree_status), + (StatusCode::Added, _) | (_, StatusCode::Added) + ), FileStatus::Untracked => true, _ => false, } } pub fn is_deleted(self) -> bool { - match self { - FileStatus::Tracked(tracked) => match (tracked.index_status, tracked.worktree_status) { - (StatusCode::Deleted, _) | (_, StatusCode::Deleted) => true, - _ => false, - }, - _ => false, - } + matches!(self, FileStatus::Tracked(tracked) if matches!((tracked.index_status, tracked.worktree_status), (StatusCode::Deleted, _) | (_, StatusCode::Deleted))) } pub fn is_untracked(self) -> bool { - match self { - FileStatus::Untracked => true, - _ => false, - } + matches!(self, FileStatus::Untracked) } pub fn summary(self) -> GitSummary { @@ -468,7 +453,7 @@ impl FromStr for GitStatus { Some((path, status)) }) .collect::>(); - entries.sort_unstable_by(|(a, _), (b, _)| a.cmp(&b)); + entries.sort_unstable_by(|(a, _), (b, _)| a.cmp(b)); // When a file exists in HEAD, is deleted in the index, and exists again in the working copy, // git produces two lines for it, one reading `D ` (deleted in index, unmodified in working copy) // and the other reading `??` (untracked). Merge these two into the equivalent of `DA`. 
diff --git a/crates/git_hosting_providers/src/git_hosting_providers.rs b/crates/git_hosting_providers/src/git_hosting_providers.rs index b31412ed4a46b0dc2695ae0229638fad409de13c..1d88c47f2e26fc9ad4e27b1e36351198c4365caf 100644 --- a/crates/git_hosting_providers/src/git_hosting_providers.rs +++ b/crates/git_hosting_providers/src/git_hosting_providers.rs @@ -49,13 +49,13 @@ pub fn register_additional_providers( pub fn get_host_from_git_remote_url(remote_url: &str) -> Result { maybe!({ - if let Some(remote_url) = remote_url.strip_prefix("git@") { - if let Some((host, _)) = remote_url.trim_start_matches("git@").split_once(':') { - return Some(host.to_string()); - } + if let Some(remote_url) = remote_url.strip_prefix("git@") + && let Some((host, _)) = remote_url.trim_start_matches("git@").split_once(':') + { + return Some(host.to_string()); } - Url::parse(&remote_url) + Url::parse(remote_url) .ok() .and_then(|remote_url| remote_url.host_str().map(|host| host.to_string())) }) diff --git a/crates/git_hosting_providers/src/providers/chromium.rs b/crates/git_hosting_providers/src/providers/chromium.rs index b68c629ec7faaf9e37316cd0f7fb4f297b55f502..5d940fb496be6fde2778272abe640987e3b2a4af 100644 --- a/crates/git_hosting_providers/src/providers/chromium.rs +++ b/crates/git_hosting_providers/src/providers/chromium.rs @@ -292,7 +292,7 @@ mod tests { assert_eq!( Chromium - .extract_pull_request(&remote, &message) + .extract_pull_request(&remote, message) .unwrap() .url .as_str(), diff --git a/crates/git_hosting_providers/src/providers/github.rs b/crates/git_hosting_providers/src/providers/github.rs index 30f8d058a7c46798209685930518f4b040dbe714..4475afeb495f41e89273ce0336d830db4cc869cf 100644 --- a/crates/git_hosting_providers/src/providers/github.rs +++ b/crates/git_hosting_providers/src/providers/github.rs @@ -474,7 +474,7 @@ mod tests { assert_eq!( github - .extract_pull_request(&remote, &message) + .extract_pull_request(&remote, message) .unwrap() .url .as_str(), @@ -488,6 +488,6 @@ mod tests { See the original PR, this is a fix. 
"# }; - assert_eq!(github.extract_pull_request(&remote, &message), None); + assert_eq!(github.extract_pull_request(&remote, message), None); } } diff --git a/crates/git_ui/src/blame_ui.rs b/crates/git_ui/src/blame_ui.rs index f910de7bbe461ea8edf7addda4acad1b712a6f60..2768e3dc6863b008693d1db853f24a2bf87e1b29 100644 --- a/crates/git_ui/src/blame_ui.rs +++ b/crates/git_ui/src/blame_ui.rs @@ -172,7 +172,7 @@ impl BlameRenderer for GitBlameRenderer { .clone() .unwrap_or("".to_string()) .into(), - author_email: blame.author_mail.clone().unwrap_or("".to_string()).into(), + author_email: blame.author_mail.unwrap_or("".to_string()).into(), message: details, }; @@ -186,7 +186,7 @@ impl BlameRenderer for GitBlameRenderer { .get(0..8) .map(|sha| sha.to_string().into()) .unwrap_or_else(|| commit_details.sha.clone()); - let full_sha = commit_details.sha.to_string().clone(); + let full_sha = commit_details.sha.to_string(); let absolute_timestamp = format_local_timestamp( commit_details.commit_time, OffsetDateTime::now_utc(), @@ -377,7 +377,7 @@ impl BlameRenderer for GitBlameRenderer { has_parent: true, }, repository.downgrade(), - workspace.clone(), + workspace, window, cx, ) diff --git a/crates/git_ui/src/branch_picker.rs b/crates/git_ui/src/branch_picker.rs index b74fa649b04ddc2ee5643965f3e0cdf0eb27e235..fb56cdcc5def31765d26545fcfa79b5f8c44e884 100644 --- a/crates/git_ui/src/branch_picker.rs +++ b/crates/git_ui/src/branch_picker.rs @@ -48,7 +48,7 @@ pub fn open( window: &mut Window, cx: &mut Context, ) { - let repository = workspace.project().read(cx).active_repository(cx).clone(); + let repository = workspace.project().read(cx).active_repository(cx); let style = BranchListStyle::Modal; workspace.toggle_modal(window, cx, |window, cx| { BranchList::new(repository, style, rems(34.), window, cx) @@ -144,7 +144,7 @@ impl BranchList { }) .detach_and_log_err(cx); - let delegate = BranchListDelegate::new(repository.clone(), style); + let delegate = BranchListDelegate::new(repository, style); let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx)); let _subscription = cx.subscribe(&picker, |_, _, _, cx| { @@ -473,7 +473,7 @@ impl PickerDelegate for BranchListDelegate { && entry.is_new { Some( - IconButton::new("branch-from-default", IconName::GitBranchSmall) + IconButton::new("branch-from-default", IconName::GitBranchAlt) .on_click(cx.listener(move |this, _, window, cx| { this.delegate.set_selected_index(ix, window, cx); this.delegate.confirm(true, window, cx); diff --git a/crates/git_ui/src/commit_modal.rs b/crates/git_ui/src/commit_modal.rs index 5dfa800ae59e5a5b5cc804a417d0d13400a13ba9..cae4d28a83b01a8195c891d2a7569803cfecd697 100644 --- a/crates/git_ui/src/commit_modal.rs +++ b/crates/git_ui/src/commit_modal.rs @@ -35,7 +35,7 @@ impl ModalContainerProperties { // Calculate width based on character width let mut modal_width = 460.0; - let style = window.text_style().clone(); + let style = window.text_style(); let font_id = window.text_system().resolve_font(&style.font()); let font_size = style.font_size.to_pixels(window.rem_size()); @@ -135,11 +135,10 @@ impl CommitModal { .as_ref() .and_then(|repo| repo.read(cx).head_commit.as_ref()) .is_some() + && !git_panel.amend_pending() { - if !git_panel.amend_pending() { - git_panel.set_amend_pending(true, cx); - git_panel.load_last_commit_message_if_empty(cx); - } + git_panel.set_amend_pending(true, cx); + git_panel.load_last_commit_message_if_empty(cx); } } ForceMode::Commit => { @@ -180,7 +179,7 @@ impl CommitModal { let commit_editor = 
git_panel.update(cx, |git_panel, cx| { git_panel.set_modal_open(true, cx); - let buffer = git_panel.commit_message_buffer(cx).clone(); + let buffer = git_panel.commit_message_buffer(cx); let panel_editor = git_panel.commit_editor.clone(); let project = git_panel.project.clone(); @@ -195,12 +194,12 @@ impl CommitModal { let commit_message = commit_editor.read(cx).text(cx); - if let Some(suggested_commit_message) = suggested_commit_message { - if commit_message.is_empty() { - commit_editor.update(cx, |editor, cx| { - editor.set_placeholder_text(suggested_commit_message, cx); - }); - } + if let Some(suggested_commit_message) = suggested_commit_message + && commit_message.is_empty() + { + commit_editor.update(cx, |editor, cx| { + editor.set_placeholder_text(suggested_commit_message, cx); + }); } let focus_handle = commit_editor.focus_handle(cx); @@ -272,7 +271,7 @@ impl CommitModal { .child( div() .px_1() - .child(Icon::new(IconName::ChevronDownSmall).size(IconSize::XSmall)), + .child(Icon::new(IconName::ChevronDown).size(IconSize::XSmall)), ), ) .menu({ @@ -286,7 +285,7 @@ impl CommitModal { Some(ContextMenu::build(window, cx, |context_menu, _, _| { context_menu .when_some(keybinding_target.clone(), |el, keybinding_target| { - el.context(keybinding_target.clone()) + el.context(keybinding_target) }) .when(has_previous_commit, |this| { this.toggleable_entry( @@ -392,15 +391,9 @@ impl CommitModal { }); let focus_handle = self.focus_handle(cx); - let close_kb_hint = - if let Some(close_kb) = ui::KeyBinding::for_action(&menu::Cancel, window, cx) { - Some( - KeybindingHint::new(close_kb, cx.theme().colors().editor_background) - .suffix("Cancel"), - ) - } else { - None - }; + let close_kb_hint = ui::KeyBinding::for_action(&menu::Cancel, window, cx).map(|close_kb| { + KeybindingHint::new(close_kb, cx.theme().colors().editor_background).suffix("Cancel") + }); h_flex() .group("commit_editor_footer") @@ -483,7 +476,7 @@ impl CommitModal { }), self.render_git_commit_menu( ElementId::Name(format!("split-button-right-{}", commit_label).into()), - Some(focus_handle.clone()), + Some(focus_handle), ) .into_any_element(), )), diff --git a/crates/git_ui/src/commit_tooltip.rs b/crates/git_ui/src/commit_tooltip.rs index 00ab911610c92dff4094452c55662447c4291259..a470bc69256068e687542334544bd9d2fc19c0cb 100644 --- a/crates/git_ui/src/commit_tooltip.rs +++ b/crates/git_ui/src/commit_tooltip.rs @@ -181,7 +181,7 @@ impl Render for CommitTooltip { .get(0..8) .map(|sha| sha.to_string().into()) .unwrap_or_else(|| self.commit.sha.clone()); - let full_sha = self.commit.sha.to_string().clone(); + let full_sha = self.commit.sha.to_string(); let absolute_timestamp = format_local_timestamp( self.commit.commit_time, OffsetDateTime::now_utc(), diff --git a/crates/git_ui/src/commit_view.rs b/crates/git_ui/src/commit_view.rs index c8c237fe90f12f2ac4ead04e0f2f0b4955f8bc1c..d428ccbb0509702ee2535fb8c8e95b059fa24499 100644 --- a/crates/git_ui/src/commit_view.rs +++ b/crates/git_ui/src/commit_view.rs @@ -88,11 +88,10 @@ impl CommitView { let ix = pane.items().position(|item| { let commit_view = item.downcast::(); commit_view - .map_or(false, |view| view.read(cx).commit.sha == commit.sha) + .is_some_and(|view| view.read(cx).commit.sha == commit.sha) }); if let Some(ix) = ix { pane.activate_item(ix, true, true, window, cx); - return; } else { pane.add_item(Box::new(commit_view), true, true, None, window, cx); } @@ -160,7 +159,7 @@ impl CommitView { }); } - cx.spawn(async move |this, mut cx| { + cx.spawn(async move |this, cx| { for file 
in commit_diff.files { let is_deleted = file.new_text.is_none(); let new_text = file.new_text.unwrap_or_default(); @@ -179,9 +178,9 @@ impl CommitView { worktree_id, }) as Arc; - let buffer = build_buffer(new_text, file, &language_registry, &mut cx).await?; + let buffer = build_buffer(new_text, file, &language_registry, cx).await?; let buffer_diff = - build_buffer_diff(old_text, &buffer, &language_registry, &mut cx).await?; + build_buffer_diff(old_text, &buffer, &language_registry, cx).await?; this.update(cx, |this, cx| { this.multibuffer.update(cx, |multibuffer, cx| { diff --git a/crates/git_ui/src/conflict_view.rs b/crates/git_ui/src/conflict_view.rs index 0bbb9411be9ef8a8e6b73d11cc4d01126570741f..ee1b82920d7621f6e5b1d4ab9a9b44e151fbf82a 100644 --- a/crates/git_ui/src/conflict_view.rs +++ b/crates/git_ui/src/conflict_view.rs @@ -55,7 +55,7 @@ pub fn register_editor(editor: &mut Editor, buffer: Entity, cx: &mu buffers: Default::default(), }); - let buffers = buffer.read(cx).all_buffers().clone(); + let buffers = buffer.read(cx).all_buffers(); for buffer in buffers { buffer_added(editor, buffer, cx); } @@ -112,7 +112,7 @@ fn excerpt_for_buffer_updated( } fn buffer_added(editor: &mut Editor, buffer: Entity, cx: &mut Context) { - let Some(project) = &editor.project else { + let Some(project) = editor.project() else { return; }; let git_store = project.read(cx).git_store().clone(); @@ -129,7 +129,7 @@ fn buffer_added(editor: &mut Editor, buffer: Entity, cx: &mut Context( where T: IntoEnumIterator + VariantNames + 'static, { - let rx = window.prompt(PromptLevel::Info, msg, detail, &T::VARIANTS, cx); + let rx = window.prompt(PromptLevel::Info, msg, detail, T::VARIANTS, cx); cx.spawn(async move |_| Ok(T::iter().nth(rx.await?).unwrap())) } @@ -426,7 +426,7 @@ impl GitPanel { let git_store = project.read(cx).git_store().clone(); let active_repository = project.read(cx).active_repository(cx); - let git_panel = cx.new(|cx| { + cx.new(|cx| { let focus_handle = cx.focus_handle(); cx.on_focus(&focus_handle, window, Self::focus_in).detach(); cx.on_focus_out(&focus_handle, window, |this, _, window, cx| { @@ -563,9 +563,7 @@ impl GitPanel { this.schedule_update(false, window, cx); this - }); - - git_panel + }) } fn hide_scrollbars(&mut self, window: &mut Window, cx: &mut Context) { @@ -652,14 +650,14 @@ impl GitPanel { if GitPanelSettings::get_global(cx).sort_by_path { return self .entries - .binary_search_by(|entry| entry.status_entry().unwrap().repo_path.cmp(&path)) + .binary_search_by(|entry| entry.status_entry().unwrap().repo_path.cmp(path)) .ok(); } if self.conflicted_count > 0 { let conflicted_start = 1; if let Ok(ix) = self.entries[conflicted_start..conflicted_start + self.conflicted_count] - .binary_search_by(|entry| entry.status_entry().unwrap().repo_path.cmp(&path)) + .binary_search_by(|entry| entry.status_entry().unwrap().repo_path.cmp(path)) { return Some(conflicted_start + ix); } @@ -671,7 +669,7 @@ impl GitPanel { 0 } + 1; if let Ok(ix) = self.entries[tracked_start..tracked_start + self.tracked_count] - .binary_search_by(|entry| entry.status_entry().unwrap().repo_path.cmp(&path)) + .binary_search_by(|entry| entry.status_entry().unwrap().repo_path.cmp(path)) { return Some(tracked_start + ix); } @@ -687,7 +685,7 @@ impl GitPanel { 0 } + 1; if let Ok(ix) = self.entries[untracked_start..untracked_start + self.new_count] - .binary_search_by(|entry| entry.status_entry().unwrap().repo_path.cmp(&path)) + .binary_search_by(|entry| entry.status_entry().unwrap().repo_path.cmp(path)) { return 
Some(untracked_start + ix); } @@ -775,7 +773,7 @@ impl GitPanel { if window .focused(cx) - .map_or(false, |focused| self.focus_handle == focused) + .is_some_and(|focused| self.focus_handle == focused) { dispatch_context.add("menu"); dispatch_context.add("ChangesList"); @@ -894,9 +892,7 @@ impl GitPanel { let have_entries = self .active_repository .as_ref() - .map_or(false, |active_repository| { - active_repository.read(cx).status_summary().count > 0 - }); + .is_some_and(|active_repository| active_repository.read(cx).status_summary().count > 0); if have_entries && self.selected_entry.is_none() { self.selected_entry = Some(1); self.scroll_to_selected_entry(cx); @@ -926,19 +922,17 @@ impl GitPanel { let workspace = self.workspace.upgrade()?; let git_repo = self.active_repository.as_ref()?; - if let Some(project_diff) = workspace.read(cx).active_item_as::(cx) { - if let Some(project_path) = project_diff.read(cx).active_path(cx) { - if Some(&entry.repo_path) - == git_repo - .read(cx) - .project_path_to_repo_path(&project_path, cx) - .as_ref() - { - project_diff.focus_handle(cx).focus(window); - project_diff.update(cx, |project_diff, cx| project_diff.autoscroll(cx)); - return None; - } - } + if let Some(project_diff) = workspace.read(cx).active_item_as::(cx) + && let Some(project_path) = project_diff.read(cx).active_path(cx) + && Some(&entry.repo_path) + == git_repo + .read(cx) + .project_path_to_repo_path(&project_path, cx) + .as_ref() + { + project_diff.focus_handle(cx).focus(window); + project_diff.update(cx, |project_diff, cx| project_diff.autoscroll(cx)); + return None; }; self.workspace @@ -1202,16 +1196,13 @@ impl GitPanel { window, cx, ); - cx.spawn(async move |this, cx| match prompt.await { - Ok(RestoreCancel::RestoreTrackedFiles) => { + cx.spawn(async move |this, cx| { + if let Ok(RestoreCancel::RestoreTrackedFiles) = prompt.await { this.update(cx, |this, cx| { this.perform_checkout(entries, cx); }) .ok(); } - _ => { - return; - } }) .detach(); } @@ -1341,10 +1332,10 @@ impl GitPanel { .iter() .filter_map(|entry| entry.status_entry()) .filter(|status_entry| { - section.contains(&status_entry, repository) + section.contains(status_entry, repository) && status_entry.staging.as_bool() != Some(goal_staged_state) }) - .map(|status_entry| status_entry.clone()) + .cloned() .collect::>(); (goal_staged_state, entries) @@ -1476,7 +1467,6 @@ impl GitPanel { .read(cx) .as_singleton() .unwrap() - .clone() } fn toggle_staged_for_selected( @@ -1642,13 +1632,12 @@ impl GitPanel { fn has_commit_message(&self, cx: &mut Context) -> bool { let text = self.commit_editor.read(cx).text(cx); if !text.trim().is_empty() { - return true; + true } else if text.is_empty() { - return self - .suggest_commit_message(cx) - .is_some_and(|text| !text.trim().is_empty()); + self.suggest_commit_message(cx) + .is_some_and(|text| !text.trim().is_empty()) } else { - return false; + false } } @@ -1833,7 +1822,9 @@ impl GitPanel { let git_status_entry = if let Some(staged_entry) = &self.single_staged_entry { Some(staged_entry) - } else if let Some(single_tracked_entry) = &self.single_tracked_entry { + } else if self.total_staged_count() == 0 + && let Some(single_tracked_entry) = &self.single_tracked_entry + { Some(single_tracked_entry) } else { None @@ -1950,7 +1941,7 @@ impl GitPanel { thinking_allowed: false, }; - let stream = model.stream_completion_text(request, &cx); + let stream = model.stream_completion_text(request, cx); match stream.await { Ok(mut messages) => { if !text_empty { @@ -2081,6 +2072,100 @@ impl GitPanel { 
.detach_and_log_err(cx); } + pub(crate) fn git_clone(&mut self, repo: String, window: &mut Window, cx: &mut Context) { + let path = cx.prompt_for_paths(gpui::PathPromptOptions { + files: false, + directories: true, + multiple: false, + prompt: Some("Select as Repository Destination".into()), + }); + + let workspace = self.workspace.clone(); + + cx.spawn_in(window, async move |this, cx| { + let mut paths = path.await.ok()?.ok()??; + let mut path = paths.pop()?; + let repo_name = repo + .split(std::path::MAIN_SEPARATOR_STR) + .last()? + .strip_suffix(".git")? + .to_owned(); + + let fs = this.read_with(cx, |this, _| this.fs.clone()).ok()?; + + let prompt_answer = match fs.git_clone(&repo, path.as_path()).await { + Ok(_) => cx.update(|window, cx| { + window.prompt( + PromptLevel::Info, + &format!("Git Clone: {}", repo_name), + None, + &["Add repo to project", "Open repo in new project"], + cx, + ) + }), + Err(e) => { + this.update(cx, |this: &mut GitPanel, cx| { + let toast = StatusToast::new(e.to_string(), cx, |this, _| { + this.icon(ToastIcon::new(IconName::XCircle).color(Color::Error)) + .dismiss_button(true) + }); + + this.workspace + .update(cx, |workspace, cx| { + workspace.toggle_status_toast(toast, cx); + }) + .ok(); + }) + .ok()?; + + return None; + } + } + .ok()?; + + path.push(repo_name); + match prompt_answer.await.ok()? { + 0 => { + workspace + .update(cx, |workspace, cx| { + workspace + .project() + .update(cx, |project, cx| { + project.create_worktree(path.as_path(), true, cx) + }) + .detach(); + }) + .ok(); + } + 1 => { + workspace + .update(cx, move |workspace, cx| { + workspace::open_new( + Default::default(), + workspace.app_state().clone(), + cx, + move |workspace, _, cx| { + cx.activate(true); + workspace + .project() + .update(cx, |project, cx| { + project.create_worktree(&path, true, cx) + }) + .detach(); + }, + ) + .detach(); + }) + .ok(); + } + _ => {} + } + + Some(()) + }) + .detach(); + } + pub(crate) fn git_init(&mut self, window: &mut Window, cx: &mut Context) { let worktrees = self .project @@ -2090,7 +2175,7 @@ impl GitPanel { let worktree = if worktrees.len() == 1 { Task::ready(Some(worktrees.first().unwrap().clone())) - } else if worktrees.len() == 0 { + } else if worktrees.is_empty() { let result = window.prompt( PromptLevel::Warning, "Unable to initialize a git repository", @@ -2418,10 +2503,11 @@ impl GitPanel { new_co_authors.push((name.clone(), email.clone())) } } - if !project.is_local() && !project.is_read_only(cx) { - if let Some(local_committer) = self.local_committer(room, cx) { - new_co_authors.push(local_committer); - } + if !project.is_local() + && !project.is_read_only(cx) + && let Some(local_committer) = self.local_committer(room, cx) + { + new_co_authors.push(local_committer); } new_co_authors } @@ -2660,35 +2746,34 @@ impl GitPanel { for pending in self.pending.iter() { if pending.target_status == TargetStatus::Staged { pending_staged_count += pending.entries.len(); - last_pending_staged = pending.entries.iter().next().cloned(); + last_pending_staged = pending.entries.first().cloned(); } - if let Some(single_staged) = &single_staged_entry { - if pending + if let Some(single_staged) = &single_staged_entry + && pending .entries .iter() .any(|entry| entry.repo_path == single_staged.repo_path) - { - pending_status_for_single_staged = Some(pending.target_status); - } + { + pending_status_for_single_staged = Some(pending.target_status); } } - if conflict_entries.len() == 0 && staged_count == 1 && pending_staged_count == 0 { + if 
conflict_entries.is_empty() && staged_count == 1 && pending_staged_count == 0 { match pending_status_for_single_staged { Some(TargetStatus::Staged) | None => { self.single_staged_entry = single_staged_entry; } _ => {} } - } else if conflict_entries.len() == 0 && pending_staged_count == 1 { + } else if conflict_entries.is_empty() && pending_staged_count == 1 { self.single_staged_entry = last_pending_staged; } - if conflict_entries.len() == 0 && changed_entries.len() == 1 { + if conflict_entries.is_empty() && changed_entries.len() == 1 { self.single_tracked_entry = changed_entries.first().cloned(); } - if conflict_entries.len() > 0 { + if !conflict_entries.is_empty() { self.entries.push(GitListEntry::Header(GitHeaderEntry { header: Section::Conflict, })); @@ -2696,7 +2781,7 @@ impl GitPanel { .extend(conflict_entries.into_iter().map(GitListEntry::Status)); } - if changed_entries.len() > 0 { + if !changed_entries.is_empty() { if !sort_by_path { self.entries.push(GitListEntry::Header(GitHeaderEntry { header: Section::Tracked, @@ -2705,7 +2790,7 @@ impl GitPanel { self.entries .extend(changed_entries.into_iter().map(GitListEntry::Status)); } - if new_entries.len() > 0 { + if !new_entries.is_empty() { self.entries.push(GitListEntry::Header(GitHeaderEntry { header: Section::New, })); @@ -2844,8 +2929,7 @@ impl GitPanel { .matches(git::repository::REMOTE_CANCELLED_BY_USER) .next() .is_some() - { - return; // Hide the cancelled by user message + { // Hide the cancelled by user message } else { workspace.update(cx, |workspace, cx| { let workspace_weak = cx.weak_entity(); @@ -2899,11 +2983,9 @@ impl GitPanel { let status_toast = StatusToast::new(message, cx, move |this, _cx| { use remote_output::SuccessStyle::*; match style { - Toast { .. } => { - this.icon(ToastIcon::new(IconName::GitBranchSmall).color(Color::Muted)) - } + Toast => this.icon(ToastIcon::new(IconName::GitBranchAlt).color(Color::Muted)), ToastWithLog { output } => this - .icon(ToastIcon::new(IconName::GitBranchSmall).color(Color::Muted)) + .icon(ToastIcon::new(IconName::GitBranchAlt).color(Color::Muted)) .action("View Log", move |window, cx| { let output = output.clone(); let output = @@ -2915,7 +2997,7 @@ impl GitPanel { .ok(); }), PushPrLink { text, link } => this - .icon(ToastIcon::new(IconName::GitBranchSmall).color(Color::Muted)) + .icon(ToastIcon::new(IconName::GitBranchAlt).color(Color::Muted)) .action(text, move |_, cx| cx.open_url(&link)), } }); @@ -3109,7 +3191,7 @@ impl GitPanel { .justify_center() .border_l_1() .border_color(cx.theme().colors().border) - .child(Icon::new(IconName::ChevronDownSmall).size(IconSize::XSmall)), + .child(Icon::new(IconName::ChevronDown).size(IconSize::XSmall)), ), ) .menu({ @@ -3122,7 +3204,7 @@ impl GitPanel { Some(ContextMenu::build(window, cx, |context_menu, _, _| { context_menu .when_some(keybinding_target.clone(), |el, keybinding_target| { - el.context(keybinding_target.clone()) + el.context(keybinding_target) }) .when(has_previous_commit, |this| { this.toggleable_entry( @@ -3178,12 +3260,10 @@ impl GitPanel { } else { "Amend Tracked" } + } else if self.has_staged_changes() { + "Commit" } else { - if self.has_staged_changes() { - "Commit" - } else { - "Commit Tracked" - } + "Commit Tracked" } } @@ -3304,7 +3384,7 @@ impl GitPanel { let enable_coauthors = self.render_co_authors(cx); let editor_focus_handle = self.commit_editor.focus_handle(cx); - let expand_tooltip_focus_handle = editor_focus_handle.clone(); + let expand_tooltip_focus_handle = editor_focus_handle; let branch = 
active_repository.read(cx).branch.clone(); let head_commit = active_repository.read(cx).head_commit.clone(); @@ -3317,7 +3397,7 @@ impl GitPanel { * MAX_PANEL_EDITOR_LINES + gap; - let git_panel = cx.entity().clone(); + let git_panel = cx.entity(); let display_name = SharedString::from(Arc::from( active_repository .read(cx) @@ -3333,7 +3413,7 @@ impl GitPanel { display_name, branch, head_commit, - Some(git_panel.clone()), + Some(git_panel), )) .child( panel_editor_container(window, cx) @@ -3484,7 +3564,7 @@ impl GitPanel { }), self.render_git_commit_menu( ElementId::Name(format!("split-button-right-{}", title).into()), - Some(commit_tooltip_focus_handle.clone()), + Some(commit_tooltip_focus_handle), cx, ) .into_any_element(), @@ -3550,7 +3630,7 @@ impl GitPanel { CommitView::open( commit.clone(), repo.clone(), - workspace.clone().clone(), + workspace.clone(), window, cx, ); @@ -4258,7 +4338,7 @@ impl GitPanel { } }) .child( - self.entry_label(display_name.clone(), label_color) + self.entry_label(display_name, label_color) .when(status.is_deleted(), |this| this.strikethrough()), ), ) @@ -4396,7 +4476,7 @@ fn current_language_model(cx: &Context<'_, GitPanel>) -> Option) -> impl IntoElement { let project = self.project.read(cx); - let has_entries = self.entries.len() > 0; + let has_entries = !self.entries.is_empty(); let room = self .workspace .upgrade() @@ -4404,7 +4484,7 @@ impl Render for GitPanel { let has_write_access = self.has_write_access(cx); - let has_co_authors = room.map_or(false, |room| { + let has_co_authors = room.is_some_and(|room| { self.load_local_committer(cx); let room = room.read(cx); room.remote_participants() @@ -4524,7 +4604,7 @@ impl editor::Addon for GitPanelAddon { git_panel .read(cx) - .render_buffer_header_controls(&git_panel, &file, window, cx) + .render_buffer_header_controls(&git_panel, file, window, cx) } } @@ -4561,7 +4641,7 @@ impl Panel for GitPanel { } fn icon(&self, _: &Window, cx: &App) -> Option { - Some(ui::IconName::GitBranchSmall).filter(|_| GitPanelSettings::get_global(cx).button) + Some(ui::IconName::GitBranchAlt).filter(|_| GitPanelSettings::get_global(cx).button) } fn icon_tooltip(&self, _window: &Window, _cx: &App) -> Option<&'static str> { @@ -4607,7 +4687,7 @@ impl GitPanelMessageTooltip { author_email: details.author_email.clone(), commit_time: OffsetDateTime::from_unix_timestamp(details.commit_timestamp)?, message: Some(ParsedCommitMessage { - message: details.message.clone(), + message: details.message, ..Default::default() }), }; @@ -4720,12 +4800,10 @@ impl RenderOnce for PanelRepoFooter { // ideally, show the whole branch and repo names but // when we can't, use a budget to allocate space between the two - let (repo_display_len, branch_display_len) = if branch_actual_len + repo_actual_len - <= LABEL_CHARACTER_BUDGET - { - (repo_actual_len, branch_actual_len) - } else { - if branch_actual_len <= MAX_BRANCH_LEN { + let (repo_display_len, branch_display_len) = + if branch_actual_len + repo_actual_len <= LABEL_CHARACTER_BUDGET { + (repo_actual_len, branch_actual_len) + } else if branch_actual_len <= MAX_BRANCH_LEN { let repo_space = (LABEL_CHARACTER_BUDGET - branch_actual_len).min(MAX_REPO_LEN); (repo_space, branch_actual_len) } else if repo_actual_len <= MAX_REPO_LEN { @@ -4733,8 +4811,7 @@ impl RenderOnce for PanelRepoFooter { (repo_actual_len, branch_space) } else { (MAX_REPO_LEN, MAX_BRANCH_LEN) - } - }; + }; let truncated_repo_name = if repo_actual_len <= repo_display_len { active_repo_name.to_string() @@ -4743,7 +4820,7 @@ impl 
RenderOnce for PanelRepoFooter { }; let truncated_branch_name = if branch_actual_len <= branch_display_len { - branch_name.to_string() + branch_name } else { util::truncate_and_trailoff(branch_name.trim_ascii(), branch_display_len) }; @@ -4756,7 +4833,7 @@ impl RenderOnce for PanelRepoFooter { let repo_selector = PopoverMenu::new("repository-switcher") .menu({ - let project = project.clone(); + let project = project; move |window, cx| { let project = project.clone()?; Some(cx.new(|cx| RepositorySelector::new(project, rems(16.), window, cx))) @@ -4808,7 +4885,7 @@ impl RenderOnce for PanelRepoFooter { .items_center() .child( div().child( - Icon::new(IconName::GitBranchSmall) + Icon::new(IconName::GitBranchAlt) .size(IconSize::Small) .color(if single_repo { Color::Disabled @@ -4927,10 +5004,7 @@ impl Component for PanelRepoFooter { div() .w(example_width) .overflow_hidden() - .child(PanelRepoFooter::new_preview( - active_repository(1).clone(), - None, - )) + .child(PanelRepoFooter::new_preview(active_repository(1), None)) .into_any_element(), ), single_example( @@ -4939,7 +5013,7 @@ impl Component for PanelRepoFooter { .w(example_width) .overflow_hidden() .child(PanelRepoFooter::new_preview( - active_repository(2).clone(), + active_repository(2), Some(branch(unknown_upstream)), )) .into_any_element(), @@ -4950,7 +5024,7 @@ impl Component for PanelRepoFooter { .w(example_width) .overflow_hidden() .child(PanelRepoFooter::new_preview( - active_repository(3).clone(), + active_repository(3), Some(branch(no_remote_upstream)), )) .into_any_element(), @@ -4961,7 +5035,7 @@ impl Component for PanelRepoFooter { .w(example_width) .overflow_hidden() .child(PanelRepoFooter::new_preview( - active_repository(4).clone(), + active_repository(4), Some(branch(not_ahead_or_behind_upstream)), )) .into_any_element(), @@ -4972,7 +5046,7 @@ impl Component for PanelRepoFooter { .w(example_width) .overflow_hidden() .child(PanelRepoFooter::new_preview( - active_repository(5).clone(), + active_repository(5), Some(branch(behind_upstream)), )) .into_any_element(), @@ -4983,7 +5057,7 @@ impl Component for PanelRepoFooter { .w(example_width) .overflow_hidden() .child(PanelRepoFooter::new_preview( - active_repository(6).clone(), + active_repository(6), Some(branch(ahead_of_upstream)), )) .into_any_element(), @@ -4994,7 +5068,7 @@ impl Component for PanelRepoFooter { .w(example_width) .overflow_hidden() .child(PanelRepoFooter::new_preview( - active_repository(7).clone(), + active_repository(7), Some(branch(ahead_and_behind_upstream)), )) .into_any_element(), @@ -5165,7 +5239,7 @@ mod tests { project .read(cx) .worktrees(cx) - .nth(0) + .next() .unwrap() .read(cx) .as_local() @@ -5290,7 +5364,7 @@ mod tests { project .read(cx) .worktrees(cx) - .nth(0) + .next() .unwrap() .read(cx) .as_local() @@ -5341,7 +5415,7 @@ mod tests { project .read(cx) .worktrees(cx) - .nth(0) + .next() .unwrap() .read(cx) .as_local() @@ -5390,7 +5464,7 @@ mod tests { project .read(cx) .worktrees(cx) - .nth(0) + .next() .unwrap() .read(cx) .as_local() diff --git a/crates/git_ui/src/git_ui.rs b/crates/git_ui/src/git_ui.rs index 0163175eda0e8135e9aa77d80e8eb4bce0184cf0..5369b8b404ba7005bf738f4515d20b3fe4163a48 100644 --- a/crates/git_ui/src/git_ui.rs +++ b/crates/git_ui/src/git_ui.rs @@ -10,14 +10,17 @@ use git::{ status::{FileStatus, StatusCode, UnmergedStatus, UnmergedStatusCode}, }; use git_panel_settings::GitPanelSettings; -use gpui::{Action, App, Context, FocusHandle, Window, actions}; +use gpui::{ + Action, App, Context, DismissEvent, Entity, 
EventEmitter, FocusHandle, Focusable, Window, + actions, +}; use onboarding::GitOnboardingModal; use project_diff::ProjectDiff; use ui::prelude::*; -use workspace::Workspace; +use workspace::{ModalView, Workspace}; use zed_actions; -use crate::text_diff_view::TextDiffView; +use crate::{git_panel::GitPanel, text_diff_view::TextDiffView}; mod askpass_modal; pub mod branch_picker; @@ -169,6 +172,15 @@ pub fn init(cx: &mut App) { panel.git_init(window, cx); }); }); + workspace.register_action(|workspace, _action: &git::Clone, window, cx| { + let Some(panel) = workspace.panel::(cx) else { + return; + }; + + workspace.toggle_modal(window, cx, |window, cx| { + GitCloneModal::show(panel, window, cx) + }); + }); workspace.register_action(|workspace, _: &git::OpenModifiedFiles, window, cx| { open_modified_files(workspace, window, cx); }); @@ -233,12 +245,12 @@ fn render_remote_button( } (0, 0) => None, (ahead, 0) => Some(remote_button::render_push_button( - keybinding_target.clone(), + keybinding_target, id, ahead, )), (ahead, behind) => Some(remote_button::render_pull_button( - keybinding_target.clone(), + keybinding_target, id, ahead, behind, @@ -356,7 +368,7 @@ mod remote_button { "Publish", 0, 0, - Some(IconName::ArrowUpFromLine), + Some(IconName::ExpandUp), keybinding_target.clone(), move |_, window, cx| { window.dispatch_action(Box::new(git::Push), cx); @@ -383,7 +395,7 @@ mod remote_button { "Republish", 0, 0, - Some(IconName::ArrowUpFromLine), + Some(IconName::ExpandUp), keybinding_target.clone(), move |_, window, cx| { window.dispatch_action(Box::new(git::Push), cx); @@ -413,16 +425,9 @@ mod remote_button { let command = command.into(); if let Some(handle) = focus_handle { - Tooltip::with_meta_in( - label.clone(), - Some(action), - command.clone(), - &handle, - window, - cx, - ) + Tooltip::with_meta_in(label, Some(action), command, &handle, window, cx) } else { - Tooltip::with_meta(label.clone(), Some(action), command.clone(), window, cx) + Tooltip::with_meta(label, Some(action), command, window, cx) } } @@ -438,14 +443,14 @@ mod remote_button { .child( div() .px_1() - .child(Icon::new(IconName::ChevronDownSmall).size(IconSize::XSmall)), + .child(Icon::new(IconName::ChevronDown).size(IconSize::XSmall)), ), ) .menu(move |window, cx| { Some(ContextMenu::build(window, cx, |context_menu, _, _| { context_menu .when_some(keybinding_target.clone(), |el, keybinding_target| { - el.context(keybinding_target.clone()) + el.context(keybinding_target) }) .action("Fetch", git::Fetch.boxed_clone()) .action("Fetch From", git::FetchFrom.boxed_clone()) @@ -613,3 +618,88 @@ impl Component for GitStatusIcon { ) } } + +struct GitCloneModal { + panel: Entity, + repo_input: Entity, + focus_handle: FocusHandle, +} + +impl GitCloneModal { + pub fn show(panel: Entity, window: &mut Window, cx: &mut Context) -> Self { + let repo_input = cx.new(|cx| { + let mut editor = Editor::single_line(window, cx); + editor.set_placeholder_text("Enter repository URL…", cx); + editor + }); + let focus_handle = repo_input.focus_handle(cx); + + window.focus(&focus_handle); + + Self { + panel, + repo_input, + focus_handle, + } + } +} + +impl Focusable for GitCloneModal { + fn focus_handle(&self, _: &App) -> FocusHandle { + self.focus_handle.clone() + } +} + +impl Render for GitCloneModal { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + div() + .elevation_3(cx) + .w(rems(34.)) + .flex_1() + .overflow_hidden() + .child( + div() + .w_full() + .p_2() + .border_b_1() + 
.border_color(cx.theme().colors().border_variant) + .child(self.repo_input.clone()), + ) + .child( + h_flex() + .w_full() + .p_2() + .gap_0p5() + .rounded_b_sm() + .bg(cx.theme().colors().editor_background) + .child( + Label::new("Clone a repository from GitHub or other sources.") + .color(Color::Muted) + .size(LabelSize::Small), + ) + .child( + Button::new("learn-more", "Learn More") + .label_size(LabelSize::Small) + .icon(IconName::ArrowUpRight) + .icon_size(IconSize::XSmall) + .on_click(|_, _, cx| { + cx.open_url("https://github.com/git-guides/git-clone"); + }), + ), + ) + .on_action(cx.listener(|_, _: &menu::Cancel, _, cx| { + cx.emit(DismissEvent); + })) + .on_action(cx.listener(|this, _: &menu::Confirm, window, cx| { + let repo = this.repo_input.read(cx).text(cx); + this.panel.update(cx, |panel, cx| { + panel.git_clone(repo, window, cx); + }); + cx.emit(DismissEvent); + })) + } +} + +impl EventEmitter for GitCloneModal {} + +impl ModalView for GitCloneModal {} diff --git a/crates/git_ui/src/onboarding.rs b/crates/git_ui/src/onboarding.rs index d721b21a2aca2d70f8177ca8df87b9025feebfb3..d1709e043b92216e974c1a4f451db5c28b98f773 100644 --- a/crates/git_ui/src/onboarding.rs +++ b/crates/git_ui/src/onboarding.rs @@ -110,7 +110,7 @@ impl Render for GitOnboardingModal { .child(Headline::new("Native Git Support").size(HeadlineSize::Large)), ) .child(h_flex().absolute().top_2().right_2().child( - IconButton::new("cancel", IconName::X).on_click(cx.listener( + IconButton::new("cancel", IconName::Close).on_click(cx.listener( |_, _: &ClickEvent, _window, cx| { git_onboarding_event!("Cancelled", trigger = "X click"); cx.emit(DismissEvent); diff --git a/crates/git_ui/src/picker_prompt.rs b/crates/git_ui/src/picker_prompt.rs index 4077e0f3623e0925a87824e252a77755c78721ea..3f1d507c42f71eddaef4b2b8a114a84f2aabf875 100644 --- a/crates/git_ui/src/picker_prompt.rs +++ b/crates/git_ui/src/picker_prompt.rs @@ -152,7 +152,7 @@ impl PickerDelegate for PickerPromptDelegate { .all_options .iter() .enumerate() - .map(|(ix, option)| StringMatchCandidate::new(ix, &option)) + .map(|(ix, option)| StringMatchCandidate::new(ix, option)) .collect::>() }); let Some(candidates) = candidates.log_err() else { diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index d6a4e27286af1bb38dcd1acc488bce9da1813a42..524dbf13d30e4539dcc80ec37625333a37cc2206 100644 --- a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -242,7 +242,7 @@ impl ProjectDiff { TRACKED_NAMESPACE }; - let path_key = PathKey::namespaced(namespace, entry.repo_path.0.clone()); + let path_key = PathKey::namespaced(namespace, entry.repo_path.0); self.move_to_path(path_key, window, cx) } @@ -280,7 +280,7 @@ impl ProjectDiff { fn button_states(&self, cx: &App) -> ButtonStates { let editor = self.editor.read(cx); let snapshot = self.multibuffer.read(cx).snapshot(cx); - let prev_next = snapshot.diff_hunks().skip(1).next().is_some(); + let prev_next = snapshot.diff_hunks().nth(1).is_some(); let mut selection = true; let mut ranges = editor @@ -329,14 +329,14 @@ impl ProjectDiff { }) .ok(); - return ButtonStates { + ButtonStates { stage: has_unstaged_hunks, unstage: has_staged_hunks, prev_next, selection, stage_all, unstage_all, - }; + } } fn handle_editor_event( @@ -346,27 +346,24 @@ impl ProjectDiff { window: &mut Window, cx: &mut Context, ) { - match event { - EditorEvent::SelectionsChanged { local: true } => { - let Some(project_path) = self.active_path(cx) else { - return; - }; - self.workspace - 
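The clone modal introduced a few hunks above follows Zed's usual workspace-modal pattern: a `git::Clone` action handler looks up the git panel, `toggle_modal` builds the view, and the view participates through `ModalView`, `Focusable`, and `EventEmitter<DismissEvent>`. Because this rendering of the diff drops generic parameters (`Entity<…>`, `panel::<GitPanel>()`, `EventEmitter<DismissEvent>`), here is a hedged sketch of the same wiring with the type parameters restored; it is a reconstruction that assumes Zed's `workspace`, `editor`, `git`, and `gpui` crates and is not a standalone program.

```rust
use editor::Editor;
use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle};
use workspace::{ModalView, Workspace};

use crate::git_panel::GitPanel; // the sketch assumes it lives in the git_ui crate

struct GitCloneModal {
    panel: Entity<GitPanel>,    // panel that will run `git_clone` on confirm
    repo_input: Entity<Editor>, // single-line editor holding the repository URL
    focus_handle: FocusHandle,
}

// Dismissal and modal behavior come from these two impls.
impl EventEmitter<DismissEvent> for GitCloneModal {}
impl ModalView for GitCloneModal {}

// Registered inside `init`, next to the other workspace actions:
fn register(workspace: &mut Workspace) {
    workspace.register_action(|workspace, _: &git::Clone, window, cx| {
        let Some(panel) = workspace.panel::<GitPanel>(cx) else {
            return;
        };
        workspace.toggle_modal(window, cx, |window, cx| {
            GitCloneModal::show(panel, window, cx)
        });
    });
}
```

On `menu::Confirm` the modal reads the editor text, forwards it to `GitPanel::git_clone`, and emits `DismissEvent` to close itself, as the hunks above show.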
.update(cx, |workspace, cx| { - if let Some(git_panel) = workspace.panel::(cx) { - git_panel.update(cx, |git_panel, cx| { - git_panel.select_entry_by_path(project_path, window, cx) - }) - } - }) - .ok(); - } - _ => {} + if let EditorEvent::SelectionsChanged { local: true } = event { + let Some(project_path) = self.active_path(cx) else { + return; + }; + self.workspace + .update(cx, |workspace, cx| { + if let Some(git_panel) = workspace.panel::(cx) { + git_panel.update(cx, |git_panel, cx| { + git_panel.select_entry_by_path(project_path, window, cx) + }) + } + }) + .ok(); } - if editor.focus_handle(cx).contains_focused(window, cx) { - if self.multibuffer.read(cx).is_empty() { - self.focus_handle.focus(window) - } + if editor.focus_handle(cx).contains_focused(window, cx) + && self.multibuffer.read(cx).is_empty() + { + self.focus_handle.focus(window) } } @@ -451,10 +448,10 @@ impl ProjectDiff { let diff = diff.read(cx); let diff_hunk_ranges = diff .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx) - .map(|diff_hunk| diff_hunk.buffer_range.clone()); + .map(|diff_hunk| diff_hunk.buffer_range); let conflicts = conflict_addon .conflict_set(snapshot.remote_id()) - .map(|conflict_set| conflict_set.read(cx).snapshot().conflicts.clone()) + .map(|conflict_set| conflict_set.read(cx).snapshot().conflicts) .unwrap_or_default(); let conflicts = conflicts.iter().map(|conflict| conflict.range.clone()); @@ -513,7 +510,7 @@ impl ProjectDiff { mut recv: postage::watch::Receiver<()>, cx: &mut AsyncWindowContext, ) -> Result<()> { - while let Some(_) = recv.next().await { + while (recv.next().await).is_some() { let buffers_to_load = this.update(cx, |this, cx| this.load_buffers(cx))?; for buffer_to_load in buffers_to_load { if let Some(buffer) = buffer_to_load.await.log_err() { @@ -740,7 +737,7 @@ impl Render for ProjectDiff { } else { None }; - let keybinding_focus_handle = self.focus_handle(cx).clone(); + let keybinding_focus_handle = self.focus_handle(cx); el.child( v_flex() .gap_1() @@ -1073,8 +1070,7 @@ pub struct ProjectDiffEmptyState { impl RenderOnce for ProjectDiffEmptyState { fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { let status_against_remote = |ahead_by: usize, behind_by: usize| -> bool { - match self.current_branch { - Some(Branch { + matches!(self.current_branch, Some(Branch { upstream: Some(Upstream { tracking: @@ -1084,9 +1080,7 @@ impl RenderOnce for ProjectDiffEmptyState { .. }), .. 
- }) if (ahead > 0) == (ahead_by > 0) && (behind > 0) == (behind_by > 0) => true, - _ => false, - } + }) if (ahead > 0) == (ahead_by > 0) && (behind > 0) == (behind_by > 0)) }; let change_count = |current_branch: &Branch| -> (usize, usize) { @@ -1173,7 +1167,7 @@ impl RenderOnce for ProjectDiffEmptyState { .child(Label::new("No Changes").color(Color::Muted)) } else { this.when_some(self.current_branch.as_ref(), |this, branch| { - this.child(has_branch_container(&branch)) + this.child(has_branch_container(branch)) }) } }), @@ -1332,14 +1326,14 @@ fn merge_anchor_ranges<'a>( loop { if let Some(left_range) = left .peek() - .filter(|range| range.start.cmp(&next_range.end, &snapshot).is_le()) + .filter(|range| range.start.cmp(&next_range.end, snapshot).is_le()) .cloned() { left.next(); next_range.end = left_range.end; } else if let Some(right_range) = right .peek() - .filter(|range| range.start.cmp(&next_range.end, &snapshot).is_le()) + .filter(|range| range.start.cmp(&next_range.end, snapshot).is_le()) .cloned() { right.next(); diff --git a/crates/git_ui/src/text_diff_view.rs b/crates/git_ui/src/text_diff_view.rs index 005c1e18b40727f42df81437c7038f4e5a7ef905..ebf32d1b994814fa277201176b555efed5e85e66 100644 --- a/crates/git_ui/src/text_diff_view.rs +++ b/crates/git_ui/src/text_diff_view.rs @@ -48,7 +48,7 @@ impl TextDiffView { let selection_data = source_editor.update(cx, |editor, cx| { let multibuffer = editor.buffer().read(cx); - let source_buffer = multibuffer.as_singleton()?.clone(); + let source_buffer = multibuffer.as_singleton()?; let selections = editor.selections.all::(cx); let buffer_snapshot = source_buffer.read(cx); let first_selection = selections.first()?; @@ -207,7 +207,7 @@ impl TextDiffView { path: Some(format!("Clipboard ↔ {selection_location_path}").into()), buffer_changes_tx, _recalculate_diff_task: cx.spawn(async move |_, cx| { - while let Ok(_) = buffer_changes_rx.recv().await { + while buffer_changes_rx.recv().await.is_ok() { loop { let mut timer = cx .background_executor() @@ -259,7 +259,7 @@ async fn update_diff_buffer( let source_buffer_snapshot = source_buffer.read_with(cx, |buffer, _| buffer.snapshot())?; let base_buffer_snapshot = clipboard_buffer.read_with(cx, |buffer, _| buffer.snapshot())?; - let base_text = base_buffer_snapshot.text().to_string(); + let base_text = base_buffer_snapshot.text(); let diff_snapshot = cx .update(|cx| { @@ -686,7 +686,7 @@ mod tests { let project = Project::test(fs, [project_root.as_ref()], cx).await; - let (workspace, mut cx) = + let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); let buffer = project @@ -725,7 +725,7 @@ mod tests { assert_state_with_diff( &diff_view.read_with(cx, |diff_view, _| diff_view.diff_editor.clone()), - &mut cx, + cx, expected_diff, ); diff --git a/crates/go_to_line/src/cursor_position.rs b/crates/go_to_line/src/cursor_position.rs index 29064eb29cb986187b9d86046fd3d78cd2f63451..e60a3651aae3f062b16fdfa7aa01a28e5c845e85 100644 --- a/crates/go_to_line/src/cursor_position.rs +++ b/crates/go_to_line/src/cursor_position.rs @@ -1,4 +1,4 @@ -use editor::{Editor, MultiBufferSnapshot}; +use editor::{Editor, EditorSettings, MultiBufferSnapshot}; use gpui::{App, Entity, FocusHandle, Focusable, Subscription, Task, WeakEntity}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; @@ -95,10 +95,8 @@ impl CursorPosition { .ok() .unwrap_or(true); - if !is_singleton { - if let Some(debounce) = debounce { - cx.background_executor().timer(debounce).await; - } + if 
!is_singleton && let Some(debounce) = debounce { + cx.background_executor().timer(debounce).await; } editor @@ -108,7 +106,7 @@ impl CursorPosition { cursor_position.selected_count.selections = editor.selections.count(); match editor.mode() { editor::EditorMode::AutoHeight { .. } - | editor::EditorMode::SingleLine { .. } + | editor::EditorMode::SingleLine | editor::EditorMode::Minimap { .. } => { cursor_position.position = None; cursor_position.context = None; @@ -131,7 +129,7 @@ impl CursorPosition { cursor_position.selected_count.lines += 1; } } - if last_selection.as_ref().map_or(true, |last_selection| { + if last_selection.as_ref().is_none_or(|last_selection| { selection.id > last_selection.id }) { last_selection = Some(selection); @@ -209,6 +207,13 @@ impl CursorPosition { impl Render for CursorPosition { fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + if !EditorSettings::get_global(cx) + .status_bar + .cursor_position_button + { + return div(); + } + div().when_some(self.position, |el, position| { let mut text = format!( "{}{FILE_ROW_COLUMN_DELIMITER}{}", @@ -227,13 +232,11 @@ impl Render for CursorPosition { if let Some(editor) = workspace .active_item(cx) .and_then(|item| item.act_as::(cx)) + && let Some((_, buffer, _)) = editor.read(cx).active_excerpt(cx) { - if let Some((_, buffer, _)) = editor.read(cx).active_excerpt(cx) - { - workspace.toggle_modal(window, cx, |window, cx| { - crate::GoToLine::new(editor, buffer, window, cx) - }) - } + workspace.toggle_modal(window, cx, |window, cx| { + crate::GoToLine::new(editor, buffer, window, cx) + }) } }); } diff --git a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index 1ac933e316bcde24384139c851a8bedb63388611..2afc72e989b1a112c481d1c1438d216ececec626 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -103,11 +103,11 @@ impl GoToLine { return; }; editor.update(cx, |editor, cx| { - if let Some(placeholder_text) = editor.placeholder_text() { - if editor.text(cx).is_empty() { - let placeholder_text = placeholder_text.to_string(); - editor.set_text(placeholder_text, window, cx); - } + if let Some(placeholder_text) = editor.placeholder_text() + && editor.text(cx).is_empty() + { + let placeholder_text = placeholder_text.to_string(); + editor.set_text(placeholder_text, window, cx); } }); } @@ -157,7 +157,7 @@ impl GoToLine { self.prev_scroll_position.take(); cx.emit(DismissEvent) } - editor::EditorEvent::BufferEdited { .. 
} => self.highlight_current_line(cx), + editor::EditorEvent::BufferEdited => self.highlight_current_line(cx), _ => {} } } @@ -712,7 +712,7 @@ mod tests { ) -> Entity { cx.dispatch_action(editor::actions::ToggleGoToLine); workspace.update(cx, |workspace, cx| { - workspace.active_modal::(cx).unwrap().clone() + workspace.active_modal::(cx).unwrap() }) } diff --git a/crates/google_ai/src/google_ai.rs b/crates/google_ai/src/google_ai.rs index dfa51d024c46c2acc15744cd366c2fd723d59046..ca0aa309b1296e021402d921b7a7809f1e593e2b 100644 --- a/crates/google_ai/src/google_ai.rs +++ b/crates/google_ai/src/google_ai.rs @@ -106,10 +106,9 @@ pub fn validate_generate_content_request(request: &GenerateContentRequest) -> Re .contents .iter() .find(|content| content.role == Role::User) + && user_content.parts.is_empty() { - if user_content.parts.is_empty() { - bail!("User content must contain at least one part"); - } + bail!("User content must contain at least one part"); } Ok(()) @@ -267,7 +266,7 @@ pub struct CitationMetadata { pub struct PromptFeedback { #[serde(skip_serializing_if = "Option::is_none")] pub block_reason: Option, - pub safety_ratings: Vec, + pub safety_ratings: Option>, #[serde(skip_serializing_if = "Option::is_none")] pub block_reason_message: Option, } @@ -478,10 +477,10 @@ impl<'de> Deserialize<'de> for ModelName { model_id: id.to_string(), }) } else { - return Err(serde::de::Error::custom(format!( + Err(serde::de::Error::custom(format!( "Expected model name to begin with {}, got: {}", MODEL_NAME_PREFIX, string - ))); + ))) } } } diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index 6e5a76d44116d89756918605204b70fd4cbd54cd..9f5b66087da1110f50ac08d9106ec960e2f965aa 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -119,6 +119,7 @@ serde_json.workspace = true slotmap = "1.0.6" smallvec.workspace = true smol.workspace = true +stacksafe.workspace = true strum.workspace = true sum_tree.workspace = true taffy = "=0.9.0" @@ -209,7 +210,7 @@ xkbcommon = { version = "0.8.0", features = [ "wayland", "x11", ], optional = true } -xim = { git = "https://github.com/XDeme1/xim-rs", rev = "d50d461764c2213655cd9cf65a0ea94c70d3c4fd", features = [ +xim = { git = "https://github.com/zed-industries/xim-rs", rev = "c0a70c1bd2ce197364216e5e818a2cb3adb99a8d" , features = [ "x11rb-xcb", "x11rb-client", ], optional = true } @@ -305,3 +306,7 @@ path = "examples/uniform_list.rs" [[example]] name = "window_shadow" path = "examples/window_shadow.rs" + +[[example]] +name = "grid_layout" +path = "examples/grid_layout.rs" diff --git a/crates/gpui/build.rs b/crates/gpui/build.rs index 93a1c15c41dd173a35ffc0adf06af6c449809890..0040046f90554ecd3cb4c010faf18e7b98b62159 100644 --- a/crates/gpui/build.rs +++ b/crates/gpui/build.rs @@ -327,10 +327,10 @@ mod windows { /// You can set the `GPUI_FXC_PATH` environment variable to specify the path to the fxc.exe compiler. 
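Several hunks in this area collapse nested `if let` blocks into let-chains (`if let … && cond`), including the `find_fxc_compiler` helper that follows. Below is a minimal runnable sketch of that lookup order in plain std Rust, assuming a toolchain where let-chains are stable (Rust 2024 edition); the env-var name, tool name, and default path are placeholders.

```rust
use std::env;
use std::path::Path;
use std::process::Command;

/// Resolve a tool: env-var override first, then PATH via `where.exe`, then a default.
fn find_tool() -> String {
    // 1. An env-var override wins, but only if it points at an existing file.
    if let Ok(path) = env::var("MY_TOOL_PATH")
        && Path::new(&path).exists()
    {
        return path;
    }

    // 2. Ask the shell to search PATH (fails harmlessly off Windows).
    if let Ok(output) = Command::new("where.exe").arg("tool.exe").output()
        && output.status.success()
    {
        return String::from_utf8_lossy(&output.stdout).trim().to_string();
    }

    // 3. Fall back to a well-known install location.
    r"C:\tools\tool.exe".to_string()
}

fn main() {
    println!("found: {}", find_tool());
}
```

Each chain replaces what used to be two nested `if` blocks, which is exactly the shape of the refactor applied throughout this diff.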
fn find_fxc_compiler() -> String { // Check environment variable - if let Ok(path) = std::env::var("GPUI_FXC_PATH") { - if Path::new(&path).exists() { - return path; - } + if let Ok(path) = std::env::var("GPUI_FXC_PATH") + && Path::new(&path).exists() + { + return path; } // Try to find in PATH @@ -338,11 +338,10 @@ mod windows { if let Ok(output) = std::process::Command::new("where.exe") .arg("fxc.exe") .output() + && output.status.success() { - if output.status.success() { - let path = String::from_utf8_lossy(&output.stdout); - return path.trim().to_string(); - } + let path = String::from_utf8_lossy(&output.stdout); + return path.trim().to_string(); } // Check the default path @@ -374,7 +373,7 @@ mod windows { shader_path, "vs_4_1", ); - generate_rust_binding(&const_name, &output_file, &rust_binding_path); + generate_rust_binding(&const_name, &output_file, rust_binding_path); // Compile fragment shader let output_file = format!("{}/{}_ps.h", out_dir, module); @@ -387,7 +386,7 @@ mod windows { shader_path, "ps_4_1", ); - generate_rust_binding(&const_name, &output_file, &rust_binding_path); + generate_rust_binding(&const_name, &output_file, rust_binding_path); } fn compile_shader_impl( diff --git a/crates/gpui/examples/grid_layout.rs b/crates/gpui/examples/grid_layout.rs new file mode 100644 index 0000000000000000000000000000000000000000..f28549757838c8afa65ffebb9ad94dcdfa9bd837 --- /dev/null +++ b/crates/gpui/examples/grid_layout.rs @@ -0,0 +1,80 @@ +use gpui::{ + App, Application, Bounds, Context, Hsla, Window, WindowBounds, WindowOptions, div, prelude::*, + px, rgb, size, +}; + +// https://en.wikipedia.org/wiki/Holy_grail_(web_design) +struct HolyGrailExample {} + +impl Render for HolyGrailExample { + fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { + let block = |color: Hsla| { + div() + .size_full() + .bg(color) + .border_1() + .border_dashed() + .rounded_md() + .border_color(gpui::white()) + .items_center() + }; + + div() + .gap_1() + .grid() + .bg(rgb(0x505050)) + .size(px(500.0)) + .shadow_lg() + .border_1() + .size_full() + .grid_cols(5) + .grid_rows(5) + .child( + block(gpui::white()) + .row_span(1) + .col_span_full() + .child("Header"), + ) + .child( + block(gpui::red()) + .col_span(1) + .h_56() + .child("Table of contents"), + ) + .child( + block(gpui::green()) + .col_span(3) + .row_span(3) + .child("Content"), + ) + .child( + block(gpui::blue()) + .col_span(1) + .row_span(3) + .child("AD :(") + .text_color(gpui::white()), + ) + .child( + block(gpui::black()) + .row_span(1) + .col_span_full() + .text_color(gpui::white()) + .child("Footer"), + ) + } +} + +fn main() { + Application::new().run(|cx: &mut App| { + let bounds = Bounds::centered(None, size(px(500.), px(500.0)), cx); + cx.open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(bounds)), + ..Default::default() + }, + |_, cx| cx.new(|_| HolyGrailExample {}), + ) + .unwrap(); + cx.activate(true); + }); +} diff --git a/crates/gpui/examples/input.rs b/crates/gpui/examples/input.rs index 52a5b08b967927ef709dffc1e21c4075e4cdc5df..37115feaa551a787562e7299c9d44bcc97b5fca3 100644 --- a/crates/gpui/examples/input.rs +++ b/crates/gpui/examples/input.rs @@ -137,14 +137,14 @@ impl TextInput { fn copy(&mut self, _: &Copy, _: &mut Window, cx: &mut Context) { if !self.selected_range.is_empty() { cx.write_to_clipboard(ClipboardItem::new_string( - (&self.content[self.selected_range.clone()]).to_string(), + self.content[self.selected_range.clone()].to_string(), )); } } fn cut(&mut 
self, _: &Cut, window: &mut Window, cx: &mut Context) { if !self.selected_range.is_empty() { cx.write_to_clipboard(ClipboardItem::new_string( - (&self.content[self.selected_range.clone()]).to_string(), + self.content[self.selected_range.clone()].to_string(), )); self.replace_text_in_range(None, "", window, cx) } @@ -446,7 +446,7 @@ impl Element for TextElement { let (display_text, text_color) = if content.is_empty() { (input.placeholder.clone(), hsla(0., 0., 0., 0.2)) } else { - (content.clone(), style.color) + (content, style.color) }; let run = TextRun { @@ -474,7 +474,7 @@ impl Element for TextElement { }, TextRun { len: display_text.len() - marked_range.end, - ..run.clone() + ..run }, ] .into_iter() @@ -549,10 +549,10 @@ impl Element for TextElement { line.paint(bounds.origin, window.line_height(), window, cx) .unwrap(); - if focus_handle.is_focused(window) { - if let Some(cursor) = prepaint.cursor.take() { - window.paint_quad(cursor); - } + if focus_handle.is_focused(window) + && let Some(cursor) = prepaint.cursor.take() + { + window.paint_quad(cursor); } self.input.update(cx, |input, _cx| { @@ -595,9 +595,7 @@ impl Render for TextInput { .w_full() .p(px(4.)) .bg(white()) - .child(TextElement { - input: cx.entity().clone(), - }), + .child(TextElement { input: cx.entity() }), ) } } diff --git a/crates/gpui/examples/set_menus.rs b/crates/gpui/examples/set_menus.rs index f53fff7c7f7dfca1d2e44faf39347d1716ddad1c..8a97a8d8a2ce8135b1af0e981a186586c2d5ebb8 100644 --- a/crates/gpui/examples/set_menus.rs +++ b/crates/gpui/examples/set_menus.rs @@ -1,5 +1,6 @@ use gpui::{ - App, Application, Context, Menu, MenuItem, Window, WindowOptions, actions, div, prelude::*, rgb, + App, Application, Context, Menu, MenuItem, SystemMenuType, Window, WindowOptions, actions, div, + prelude::*, rgb, }; struct SetMenus; @@ -27,7 +28,11 @@ fn main() { // Add menu items cx.set_menus(vec![Menu { name: "set_menus".into(), - items: vec![MenuItem::action("Quit", Quit)], + items: vec![ + MenuItem::os_submenu("Services", SystemMenuType::Services), + MenuItem::separator(), + MenuItem::action("Quit", Quit), + ], }]); cx.open_window(WindowOptions::default(), |_, cx| cx.new(|_| SetMenus {})) .unwrap(); diff --git a/crates/gpui/examples/text.rs b/crates/gpui/examples/text.rs index 1166bb279541c80eb8686b59c85724b4068895ed..66e9cff0aa9773d99b412ea7249ca64ea103b138 100644 --- a/crates/gpui/examples/text.rs +++ b/crates/gpui/examples/text.rs @@ -155,7 +155,7 @@ impl RenderOnce for Specimen { .text_size(px(font_size * scale)) .line_height(relative(line_height)) .p(px(10.0)) - .child(self.string.clone()) + .child(self.string) } } diff --git a/crates/gpui/src/action.rs b/crates/gpui/src/action.rs index b179076cd5f0da826ca0d5da5e2a5a41cbb5e806..0b824fec34aee7abcb2dbba285265c79b6851d16 100644 --- a/crates/gpui/src/action.rs +++ b/crates/gpui/src/action.rs @@ -73,18 +73,18 @@ macro_rules! actions { /// - `name = "ActionName"` overrides the action's name. This must not contain `::`. /// /// - `no_json` causes the `build` method to always error and `action_json_schema` to return `None`, -/// and allows actions not implement `serde::Serialize` and `schemars::JsonSchema`. +/// and allows actions not implement `serde::Serialize` and `schemars::JsonSchema`. /// /// - `no_register` skips registering the action. This is useful for implementing the `Action` trait -/// while not supporting invocation by name or JSON deserialization. +/// while not supporting invocation by name or JSON deserialization. 
/// /// - `deprecated_aliases = ["editor::SomeAction"]` specifies deprecated old names for the action. -/// These action names should *not* correspond to any actions that are registered. These old names -/// can then still be used to refer to invoke this action. In Zed, the keymap JSON schema will -/// accept these old names and provide warnings. +/// These action names should *not* correspond to any actions that are registered. These old names +/// can then still be used to refer to invoke this action. In Zed, the keymap JSON schema will +/// accept these old names and provide warnings. /// /// - `deprecated = "Message about why this action is deprecation"` specifies a deprecation message. -/// In Zed, the keymap JSON schema will cause this to be displayed as a warning. +/// In Zed, the keymap JSON schema will cause this to be displayed as a warning. /// /// # Manual Implementation /// diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index ded7bae3164ab5b76568290b7335599dd720c320..bbd59fa7bc1276bedac8a17e9fe947a7211172eb 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -277,6 +277,8 @@ pub struct App { pub(crate) release_listeners: SubscriberSet, pub(crate) global_observers: SubscriberSet, pub(crate) quit_observers: SubscriberSet<(), QuitHandler>, + pub(crate) restart_observers: SubscriberSet<(), Handler>, + pub(crate) restart_path: Option, pub(crate) window_closed_observers: SubscriberSet<(), WindowClosedHandler>, pub(crate) layout_id_buffer: Vec, // We recycle this memory across layout requests. pub(crate) propagate_event: bool, @@ -349,6 +351,8 @@ impl App { keyboard_layout_observers: SubscriberSet::new(), global_observers: SubscriberSet::new(), quit_observers: SubscriberSet::new(), + restart_observers: SubscriberSet::new(), + restart_path: None, window_closed_observers: SubscriberSet::new(), layout_id_buffer: Default::default(), propagate_event: true, @@ -364,7 +368,7 @@ impl App { }), }); - init_app_menus(platform.as_ref(), &mut app.borrow_mut()); + init_app_menus(platform.as_ref(), &app.borrow()); platform.on_keyboard_layout_change(Box::new({ let app = Rc::downgrade(&app); @@ -812,8 +816,9 @@ impl App { pub fn prompt_for_new_path( &self, directory: &Path, + suggested_name: Option<&str>, ) -> oneshot::Receiver>> { - self.platform.prompt_for_new_path(directory) + self.platform.prompt_for_new_path(directory, suggested_name) } /// Reveals the specified path at the platform level, such as in Finder on macOS. @@ -832,8 +837,16 @@ impl App { } /// Restarts the application. - pub fn restart(&self, binary_path: Option) { - self.platform.restart(binary_path) + pub fn restart(&mut self) { + self.restart_observers + .clone() + .retain(&(), |observer| observer(self)); + self.platform.restart(self.restart_path.take()) + } + + /// Sets the path to use when restarting the application. + pub fn set_restart_path(&mut self, path: PathBuf) { + self.restart_path = Some(path); } /// Returns the HTTP client for the application. 
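The new restart plumbing lets the app stage a replacement binary and run observers before relaunching: `set_restart_path` stores the path, and `restart()` drains it after notifying the observers registered through the restart hooks added in the following hunks. A minimal sketch of the call side, assuming the gpui revision from this diff; the binary path is a placeholder.

```rust
use std::path::PathBuf;

use gpui::{App, Application};

fn main() {
    Application::new().run(|cx: &mut App| {
        // Runs before the platform relaunch is requested.
        cx.on_app_restart(|_cx| {
            eprintln!("flushing state before restart");
        })
        .detach();

        // Stage the binary to relaunch into; `restart()` takes the stored path,
        // so an auto-updater can point at a freshly downloaded build.
        cx.set_restart_path(PathBuf::from("/path/to/updated/binary"));
        cx.restart(); // notifies restart observers, then asks the platform to relaunch
    });
}
```

Compared with the old `restart(binary_path: Option<PathBuf>)`, keeping the path as separate state means callers that merely trigger a restart no longer need to know where the updated binary lives.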
@@ -1297,7 +1310,7 @@ impl App { T: 'static, { let window_handle = window.handle; - self.observe_release(&handle, move |entity, cx| { + self.observe_release(handle, move |entity, cx| { let _ = window_handle.update(cx, |_, window, cx| on_release(entity, window, cx)); }) } @@ -1319,7 +1332,7 @@ impl App { } inner( - &mut self.keystroke_observers, + &self.keystroke_observers, Box::new(move |event, window, cx| { f(event, window, cx); true @@ -1345,7 +1358,7 @@ impl App { } inner( - &mut self.keystroke_interceptors, + &self.keystroke_interceptors, Box::new(move |event, window, cx| { f(event, window, cx); true @@ -1466,6 +1479,21 @@ impl App { subscription } + /// Register a callback to be invoked when the application is about to restart. + /// + /// These callbacks are called before any `on_app_quit` callbacks. + pub fn on_app_restart(&self, mut on_restart: impl 'static + FnMut(&mut App)) -> Subscription { + let (subscription, activate) = self.restart_observers.insert( + (), + Box::new(move |cx| { + on_restart(cx); + true + }), + ); + activate(); + subscription + } + /// Register a callback to be invoked when a window is closed /// The window is no longer accessible at the point this callback is invoked. pub fn on_window_closed(&self, mut on_closed: impl FnMut(&mut App) + 'static) -> Subscription { @@ -1488,12 +1516,11 @@ impl App { /// the bindings in the element tree, and any global action listeners. pub fn is_action_available(&mut self, action: &dyn Action) -> bool { let mut action_available = false; - if let Some(window) = self.active_window() { - if let Ok(window_action_available) = + if let Some(window) = self.active_window() + && let Ok(window_action_available) = window.update(self, |_, window, cx| window.is_action_available(action, cx)) - { - action_available = window_action_available; - } + { + action_available = window_action_available; } action_available @@ -1578,27 +1605,26 @@ impl App { .insert(action.as_any().type_id(), global_listeners); } - if self.propagate_event { - if let Some(mut global_listeners) = self + if self.propagate_event + && let Some(mut global_listeners) = self .global_action_listeners .remove(&action.as_any().type_id()) - { - for listener in global_listeners.iter().rev() { - listener(action.as_any(), DispatchPhase::Bubble, self); - if !self.propagate_event { - break; - } + { + for listener in global_listeners.iter().rev() { + listener(action.as_any(), DispatchPhase::Bubble, self); + if !self.propagate_event { + break; } + } - global_listeners.extend( - self.global_action_listeners - .remove(&action.as_any().type_id()) - .unwrap_or_default(), - ); - + global_listeners.extend( self.global_action_listeners - .insert(action.as_any().type_id(), global_listeners); - } + .remove(&action.as_any().type_id()) + .unwrap_or_default(), + ); + + self.global_action_listeners + .insert(action.as_any().type_id(), global_listeners); } } @@ -1681,8 +1707,8 @@ impl App { .unwrap_or_else(|| { is_first = true; let future = A::load(source.clone(), self); - let task = self.background_executor().spawn(future).shared(); - task + + self.background_executor().spawn(future).shared() }); self.loading_assets.insert(asset_id, Box::new(task.clone())); @@ -1889,7 +1915,7 @@ impl AppContext for App { G: Global, { let mut g = self.global::(); - callback(&g, self) + callback(g, self) } } diff --git a/crates/gpui/src/app/async_context.rs b/crates/gpui/src/app/async_context.rs index d9d21c024461cab68d62d685a40b61c9c74d46dd..5eb436290415ed91c89ae85964bbb5093faa38f3 100644 --- 
a/crates/gpui/src/app/async_context.rs +++ b/crates/gpui/src/app/async_context.rs @@ -465,7 +465,7 @@ impl VisualContext for AsyncWindowContext { V: Focusable, { self.window.update(self, |_, window, cx| { - view.read(cx).focus_handle(cx).clone().focus(window); + view.read(cx).focus_handle(cx).focus(window); }) } } diff --git a/crates/gpui/src/app/context.rs b/crates/gpui/src/app/context.rs index 392be2ffe9ce4eed9397a11770b7133db145a7a8..1112878a66b07c133031086c3b14aa8427617bea 100644 --- a/crates/gpui/src/app/context.rs +++ b/crates/gpui/src/app/context.rs @@ -164,6 +164,20 @@ impl<'a, T: 'static> Context<'a, T> { subscription } + /// Register a callback to be invoked when the application is about to restart. + pub fn on_app_restart( + &self, + mut on_restart: impl FnMut(&mut T, &mut App) + 'static, + ) -> Subscription + where + T: 'static, + { + let handle = self.weak_entity(); + self.app.on_app_restart(move |cx| { + handle.update(cx, |entity, cx| on_restart(entity, cx)).ok(); + }) + } + /// Arrange for the given function to be invoked whenever the application is quit. /// The future returned from this callback will be polled for up to [crate::SHUTDOWN_TIMEOUT] until the app fully quits. pub fn on_app_quit( @@ -175,20 +189,15 @@ impl<'a, T: 'static> Context<'a, T> { T: 'static, { let handle = self.weak_entity(); - let (subscription, activate) = self.app.quit_observers.insert( - (), - Box::new(move |cx| { - let future = handle.update(cx, |entity, cx| on_quit(entity, cx)).ok(); - async move { - if let Some(future) = future { - future.await; - } + self.app.on_app_quit(move |cx| { + let future = handle.update(cx, |entity, cx| on_quit(entity, cx)).ok(); + async move { + if let Some(future) = future { + future.await; } - .boxed_local() - }), - ); - activate(); - subscription + } + .boxed_local() + }) } /// Tell GPUI that this entity has changed and observers of it should be notified. 
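`Context::on_app_restart` exposes the same hook scoped to an entity, upgrading the weak handle and silently dropping the callback once the entity is gone. A small sketch of an entity flushing state before relaunch; `SettingsStore` and its `dirty` flag are stand-ins.

```rust
use gpui::{App, Context};

struct SettingsStore {
    dirty: bool,
}

impl SettingsStore {
    fn new(cx: &mut Context<Self>) -> Self {
        // The closure receives `&mut Self`, so the entity can persist its own
        // state right before the app relaunches.
        cx.on_app_restart(|this: &mut Self, _cx: &mut App| {
            if this.dirty {
                // stand-in for writing settings to disk
                this.dirty = false;
            }
        })
        .detach();

        Self { dirty: false }
    }
}
```

Per the doc comment above, restart observers run before the `on_app_quit` futures, so restart-specific work (such as rewriting a config the relaunched process reads at startup) happens while the entity map is still intact.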
@@ -463,7 +472,7 @@ impl<'a, T: 'static> Context<'a, T> { let view = self.weak_entity(); inner( - &mut self.keystroke_observers, + &self.keystroke_observers, Box::new(move |event, window, cx| { if let Some(view) = view.upgrade() { view.update(cx, |view, cx| f(view, event, window, cx)); @@ -601,16 +610,16 @@ impl<'a, T: 'static> Context<'a, T> { let (subscription, activate) = window.new_focus_listener(Box::new(move |event, window, cx| { view.update(cx, |view, cx| { - if let Some(blurred_id) = event.previous_focus_path.last().copied() { - if event.is_focus_out(focus_id) { - let event = FocusOutEvent { - blurred: WeakFocusHandle { - id: blurred_id, - handles: Arc::downgrade(&cx.focus_handles), - }, - }; - listener(view, event, window, cx) - } + if let Some(blurred_id) = event.previous_focus_path.last().copied() + && event.is_focus_out(focus_id) + { + let event = FocusOutEvent { + blurred: WeakFocusHandle { + id: blurred_id, + handles: Arc::downgrade(&cx.focus_handles), + }, + }; + listener(view, event, window, cx) } }) .is_ok() diff --git a/crates/gpui/src/app/entity_map.rs b/crates/gpui/src/app/entity_map.rs index fccb417caa70c7526a0f15a307d74caeabcdab77..ea52b46d9fce958f8cb6e878581fb988c146c43b 100644 --- a/crates/gpui/src/app/entity_map.rs +++ b/crates/gpui/src/app/entity_map.rs @@ -231,14 +231,15 @@ impl AnyEntity { Self { entity_id: id, entity_type, - entity_map: entity_map.clone(), #[cfg(any(test, feature = "leak-detection"))] handle_id: entity_map + .clone() .upgrade() .unwrap() .write() .leak_detector .handle_created(id), + entity_map, } } @@ -661,7 +662,7 @@ pub struct WeakEntity { impl std::fmt::Debug for WeakEntity { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct(&type_name::()) + f.debug_struct(type_name::()) .field("entity_id", &self.any_entity.entity_id) .field("entity_type", &type_name::()) .finish() @@ -786,7 +787,7 @@ impl PartialOrd for WeakEntity { #[cfg(any(test, feature = "leak-detection"))] static LEAK_BACKTRACE: std::sync::LazyLock = - std::sync::LazyLock::new(|| std::env::var("LEAK_BACKTRACE").map_or(false, |b| !b.is_empty())); + std::sync::LazyLock::new(|| std::env::var("LEAK_BACKTRACE").is_ok_and(|b| !b.is_empty())); #[cfg(any(test, feature = "leak-detection"))] #[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq)] diff --git a/crates/gpui/src/app/test_context.rs b/crates/gpui/src/app/test_context.rs index 35e60326714f049faeaac54e8d979a91f9d97bbc..c65c045f6bc16310d3772825147ad570f209fd99 100644 --- a/crates/gpui/src/app/test_context.rs +++ b/crates/gpui/src/app/test_context.rs @@ -134,7 +134,7 @@ impl TestAppContext { app: App::new_app(platform.clone(), asset_source, http_client), background_executor, foreground_executor, - dispatcher: dispatcher.clone(), + dispatcher, test_platform: platform, text_system, fn_name, @@ -192,6 +192,7 @@ impl TestAppContext { &self.foreground_executor } + #[expect(clippy::wrong_self_convention)] fn new(&mut self, build_entity: impl FnOnce(&mut Context) -> T) -> Entity { let mut cx = self.app.borrow_mut(); cx.new(build_entity) @@ -219,7 +220,7 @@ impl TestAppContext { let mut cx = self.app.borrow_mut(); // Some tests rely on the window size matching the bounds of the test display - let bounds = Bounds::maximized(None, &mut cx); + let bounds = Bounds::maximized(None, &cx); cx.open_window( WindowOptions { window_bounds: Some(WindowBounds::Windowed(bounds)), @@ -233,7 +234,7 @@ impl TestAppContext { /// Adds a new window with no content. 
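A recurring cleanup in this diff swaps `map_or(false, …)` / `map_or(true, …)` for the dedicated predicate adapters: `is_some_and`, `is_ok_and` (as in the `LEAK_BACKTRACE` check above), and `is_none_or` (used later for the list layout cache). A runnable refresher in plain std Rust, assuming Rust 1.82+ for `is_none_or`:

```rust
fn main() {
    // `is_ok_and`: true only when the env var is set *and* non-empty,
    // the same shape as the LEAK_BACKTRACE check.
    let leak_backtrace = std::env::var("LEAK_BACKTRACE").is_ok_and(|v| !v.is_empty());

    // `is_some_and` replaces `map_or(false, …)`.
    let hovered: Option<u64> = Some(3);
    let is_hovered = hovered.is_some_and(|id| id == 3);

    // `is_none_or` replaces `map_or(true, …)`: "no cached width" counts the
    // same as "width changed", so the cache is invalidated either way.
    let cached_width: Option<u32> = None;
    let needs_relayout = cached_width.is_none_or(|w| w != 800);

    println!("{leak_backtrace} {is_hovered} {needs_relayout}");
}
```

The adapters read as predicates and avoid the easy-to-misread default argument of `map_or`.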
pub fn add_empty_window(&mut self) -> &mut VisualTestContext { let mut cx = self.app.borrow_mut(); - let bounds = Bounds::maximized(None, &mut cx); + let bounds = Bounds::maximized(None, &cx); let window = cx .open_window( WindowOptions { @@ -244,7 +245,7 @@ impl TestAppContext { ) .unwrap(); drop(cx); - let cx = VisualTestContext::from_window(*window.deref(), self).as_mut(); + let cx = VisualTestContext::from_window(*window.deref(), self).into_mut(); cx.run_until_parked(); cx } @@ -261,7 +262,7 @@ impl TestAppContext { V: 'static + Render, { let mut cx = self.app.borrow_mut(); - let bounds = Bounds::maximized(None, &mut cx); + let bounds = Bounds::maximized(None, &cx); let window = cx .open_window( WindowOptions { @@ -273,7 +274,7 @@ impl TestAppContext { .unwrap(); drop(cx); let view = window.root(self).unwrap(); - let cx = VisualTestContext::from_window(*window.deref(), self).as_mut(); + let cx = VisualTestContext::from_window(*window.deref(), self).into_mut(); cx.run_until_parked(); // it might be nice to try and cleanup these at the end of each test. @@ -338,7 +339,7 @@ impl TestAppContext { /// Returns all windows open in the test. pub fn windows(&self) -> Vec { - self.app.borrow().windows().clone() + self.app.borrow().windows() } /// Run the given task on the main thread. @@ -585,7 +586,7 @@ impl Entity { cx.executor().advance_clock(advance_clock_by); async move { - let notification = crate::util::timeout(duration, rx.recv()) + let notification = crate::util::smol_timeout(duration, rx.recv()) .await .expect("next notification timed out"); drop(subscription); @@ -618,7 +619,7 @@ impl Entity { } }), cx.subscribe(self, { - let mut tx = tx.clone(); + let mut tx = tx; move |_, _: &Evt, _| { tx.blocking_send(()).ok(); } @@ -629,7 +630,7 @@ impl Entity { let handle = self.downgrade(); async move { - crate::util::timeout(Duration::from_secs(1), async move { + crate::util::smol_timeout(Duration::from_secs(1), async move { loop { { let cx = cx.borrow(); @@ -882,7 +883,7 @@ impl VisualTestContext { /// Get an &mut VisualTestContext (which is mostly what you need to pass to other methods). /// This method internally retains the VisualTestContext until the end of the test. - pub fn as_mut(self) -> &'static mut Self { + pub fn into_mut(self) -> &'static mut Self { let ptr = Box::into_raw(Box::new(self)); // safety: on_quit will be called after the test has finished. // the executor will ensure that all tasks related to the test have stopped. 
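The `as_mut` → `into_mut` rename above follows Rust's naming convention (an `into_` method consumes `self`), and the safety comment explains the trick: the context is boxed and intentionally leaked so the test gets a `&'static mut` reference that stays valid for the rest of the run. A tiny std-only illustration of that leak-for-`'static` pattern; `Counter` is a stand-in type.

```rust
struct Counter(u32);

/// Consume the value, leak its heap allocation, and hand back a `'static`
/// mutable reference. `Box::leak` is the safe wrapper around the
/// `Box::into_raw` + dereference dance used in `into_mut`.
fn into_static(counter: Counter) -> &'static mut Counter {
    Box::leak(Box::new(counter))
}

fn main() {
    let counter = into_static(Counter(0));
    counter.0 += 1;
    println!("{}", counter.0); // 1 — the allocation is never freed here
}
```

In the original, an `on_quit` hook reclaims the leaked box once the test finishes, per the safety comment above; this bare `Box::leak` sketch skips that cleanup.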
@@ -1025,7 +1026,7 @@ impl VisualContext for VisualTestContext { fn focus(&mut self, view: &Entity) -> Self::Result<()> { self.window .update(&mut self.cx, |_, window, cx| { - view.read(cx).focus_handle(cx).clone().focus(window) + view.read(cx).focus_handle(cx).focus(window) }) .unwrap() } diff --git a/crates/gpui/src/arena.rs b/crates/gpui/src/arena.rs index ee72d0e96425816220094f4cbff86315153afb74..0983bd23454c9a3a921ed721ecd32561387f9049 100644 --- a/crates/gpui/src/arena.rs +++ b/crates/gpui/src/arena.rs @@ -142,7 +142,7 @@ impl Arena { if self.current_chunk_index >= self.chunks.len() { self.chunks.push(Chunk::new(self.chunk_size)); assert_eq!(self.current_chunk_index, self.chunks.len() - 1); - log::info!( + log::trace!( "increased element arena capacity to {}kb", self.capacity() / 1024, ); diff --git a/crates/gpui/src/color.rs b/crates/gpui/src/color.rs index 639c84c10144310b14a94c2a22b84957b8b09524..cb7329c03fbb2064da0ef5873eef92c2c33d4953 100644 --- a/crates/gpui/src/color.rs +++ b/crates/gpui/src/color.rs @@ -905,9 +905,9 @@ mod tests { assert_eq!(background.solid, color); assert_eq!(background.opacity(0.5).solid, color.opacity(0.5)); - assert_eq!(background.is_transparent(), false); + assert!(!background.is_transparent()); background.solid = hsla(0.0, 0.0, 0.0, 0.0); - assert_eq!(background.is_transparent(), true); + assert!(background.is_transparent()); } #[test] @@ -921,7 +921,7 @@ mod tests { assert_eq!(background.opacity(0.5).colors[0], from.opacity(0.5)); assert_eq!(background.opacity(0.5).colors[1], to.opacity(0.5)); - assert_eq!(background.is_transparent(), false); - assert_eq!(background.opacity(0.0).is_transparent(), true); + assert!(!background.is_transparent()); + assert!(background.opacity(0.0).is_transparent()); } } diff --git a/crates/gpui/src/element.rs b/crates/gpui/src/element.rs index e5f49c7be141a3620e52599bcc2b151acc1f7319..f537bc5ac840432732ae8c9fb608ba74ffefa168 100644 --- a/crates/gpui/src/element.rs +++ b/crates/gpui/src/element.rs @@ -603,10 +603,8 @@ impl AnyElement { self.0.prepaint(window, cx); - if !focus_assigned { - if let Some(focus_id) = window.next_frame.focus { - return FocusHandle::for_id(focus_id, &cx.focus_handles); - } + if !focus_assigned && let Some(focus_id) = window.next_frame.focus { + return FocusHandle::for_id(focus_id, &cx.focus_handles); } None diff --git a/crates/gpui/src/elements/div.rs b/crates/gpui/src/elements/div.rs index 09afbff929b99bb927d365621ea0550c28dcedf8..c9826b704e5732424f06e951c452331cb199a0fa 100644 --- a/crates/gpui/src/elements/div.rs +++ b/crates/gpui/src/elements/div.rs @@ -27,6 +27,7 @@ use crate::{ use collections::HashMap; use refineable::Refineable; use smallvec::SmallVec; +use stacksafe::{StackSafe, stacksafe}; use std::{ any::{Any, TypeId}, cell::RefCell, @@ -285,21 +286,20 @@ impl Interactivity { { self.mouse_move_listeners .push(Box::new(move |event, phase, hitbox, window, cx| { - if phase == DispatchPhase::Capture { - if let Some(drag) = &cx.active_drag { - if drag.value.as_ref().type_id() == TypeId::of::() { - (listener)( - &DragMoveEvent { - event: event.clone(), - bounds: hitbox.bounds, - drag: PhantomData, - dragged_item: Arc::clone(&drag.value), - }, - window, - cx, - ); - } - } + if phase == DispatchPhase::Capture + && let Some(drag) = &cx.active_drag + && drag.value.as_ref().type_id() == TypeId::of::() + { + (listener)( + &DragMoveEvent { + event: event.clone(), + bounds: hitbox.bounds, + drag: PhantomData, + dragged_item: Arc::clone(&drag.value), + }, + window, + cx, + ); } })); } @@ -1195,7 
+1195,7 @@ pub fn div() -> Div { /// A [`Div`] element, the all-in-one element for building complex UIs in GPUI pub struct Div { interactivity: Interactivity, - children: SmallVec<[AnyElement; 2]>, + children: SmallVec<[StackSafe; 2]>, prepaint_listener: Option>, &mut Window, &mut App) + 'static>>, image_cache: Option>, } @@ -1256,7 +1256,8 @@ impl InteractiveElement for Div { impl ParentElement for Div { fn extend(&mut self, elements: impl IntoIterator) { - self.children.extend(elements) + self.children + .extend(elements.into_iter().map(StackSafe::new)) } } @@ -1272,6 +1273,7 @@ impl Element for Div { self.interactivity.source_location() } + #[stacksafe] fn request_layout( &mut self, global_id: Option<&GlobalElementId>, @@ -1307,6 +1309,7 @@ impl Element for Div { (layout_id, DivFrameState { child_layout_ids }) } + #[stacksafe] fn prepaint( &mut self, global_id: Option<&GlobalElementId>, @@ -1376,6 +1379,7 @@ impl Element for Div { ) } + #[stacksafe] fn paint( &mut self, global_id: Option<&GlobalElementId>, @@ -1509,15 +1513,14 @@ impl Interactivity { let mut element_state = element_state.map(|element_state| element_state.unwrap_or_default()); - if let Some(element_state) = element_state.as_ref() { - if cx.has_active_drag() { - if let Some(pending_mouse_down) = element_state.pending_mouse_down.as_ref() - { - *pending_mouse_down.borrow_mut() = None; - } - if let Some(clicked_state) = element_state.clicked_state.as_ref() { - *clicked_state.borrow_mut() = ElementClickedState::default(); - } + if let Some(element_state) = element_state.as_ref() + && cx.has_active_drag() + { + if let Some(pending_mouse_down) = element_state.pending_mouse_down.as_ref() { + *pending_mouse_down.borrow_mut() = None; + } + if let Some(clicked_state) = element_state.clicked_state.as_ref() { + *clicked_state.borrow_mut() = ElementClickedState::default(); } } @@ -1525,35 +1528,35 @@ impl Interactivity { // If there's an explicit focus handle we're tracking, use that. Otherwise // create a new handle and store it in the element state, which lives for as // as frames contain an element with this id. 
- if self.focusable && self.tracked_focus_handle.is_none() { - if let Some(element_state) = element_state.as_mut() { - let mut handle = element_state - .focus_handle - .get_or_insert_with(|| cx.focus_handle()) - .clone() - .tab_stop(false); - - if let Some(index) = self.tab_index { - handle = handle.tab_index(index).tab_stop(true); - } - - self.tracked_focus_handle = Some(handle); + if self.focusable + && self.tracked_focus_handle.is_none() + && let Some(element_state) = element_state.as_mut() + { + let mut handle = element_state + .focus_handle + .get_or_insert_with(|| cx.focus_handle()) + .clone() + .tab_stop(false); + + if let Some(index) = self.tab_index { + handle = handle.tab_index(index).tab_stop(true); } + + self.tracked_focus_handle = Some(handle); } if let Some(scroll_handle) = self.tracked_scroll_handle.as_ref() { self.scroll_offset = Some(scroll_handle.0.borrow().offset.clone()); - } else if self.base_style.overflow.x == Some(Overflow::Scroll) - || self.base_style.overflow.y == Some(Overflow::Scroll) + } else if (self.base_style.overflow.x == Some(Overflow::Scroll) + || self.base_style.overflow.y == Some(Overflow::Scroll)) + && let Some(element_state) = element_state.as_mut() { - if let Some(element_state) = element_state.as_mut() { - self.scroll_offset = Some( - element_state - .scroll_offset - .get_or_insert_with(Rc::default) - .clone(), - ); - } + self.scroll_offset = Some( + element_state + .scroll_offset + .get_or_insert_with(Rc::default) + .clone(), + ); } let style = self.compute_style_internal(None, element_state.as_mut(), window, cx); @@ -2026,26 +2029,27 @@ impl Interactivity { let hitbox = hitbox.clone(); window.on_mouse_event({ move |_: &MouseUpEvent, phase, window, cx| { - if let Some(drag) = &cx.active_drag { - if phase == DispatchPhase::Bubble && hitbox.is_hovered(window) { - let drag_state_type = drag.value.as_ref().type_id(); - for (drop_state_type, listener) in &drop_listeners { - if *drop_state_type == drag_state_type { - let drag = cx - .active_drag - .take() - .expect("checked for type drag state type above"); - - let mut can_drop = true; - if let Some(predicate) = &can_drop_predicate { - can_drop = predicate(drag.value.as_ref(), window, cx); - } + if let Some(drag) = &cx.active_drag + && phase == DispatchPhase::Bubble + && hitbox.is_hovered(window) + { + let drag_state_type = drag.value.as_ref().type_id(); + for (drop_state_type, listener) in &drop_listeners { + if *drop_state_type == drag_state_type { + let drag = cx + .active_drag + .take() + .expect("checked for type drag state type above"); + + let mut can_drop = true; + if let Some(predicate) = &can_drop_predicate { + can_drop = predicate(drag.value.as_ref(), window, cx); + } - if can_drop { - listener(drag.value.as_ref(), window, cx); - window.refresh(); - cx.stop_propagation(); - } + if can_drop { + listener(drag.value.as_ref(), window, cx); + window.refresh(); + cx.stop_propagation(); } } } @@ -2089,31 +2093,24 @@ impl Interactivity { } let mut pending_mouse_down = pending_mouse_down.borrow_mut(); - if let Some(mouse_down) = pending_mouse_down.clone() { - if !cx.has_active_drag() - && (event.position - mouse_down.position).magnitude() - > DRAG_THRESHOLD - { - if let Some((drag_value, drag_listener)) = drag_listener.take() { - *clicked_state.borrow_mut() = ElementClickedState::default(); - let cursor_offset = event.position - hitbox.origin; - let drag = (drag_listener)( - drag_value.as_ref(), - cursor_offset, - window, - cx, - ); - cx.active_drag = Some(AnyDrag { - view: drag, - value: drag_value, - 
cursor_offset, - cursor_style: drag_cursor_style, - }); - pending_mouse_down.take(); - window.refresh(); - cx.stop_propagation(); - } - } + if let Some(mouse_down) = pending_mouse_down.clone() + && !cx.has_active_drag() + && (event.position - mouse_down.position).magnitude() > DRAG_THRESHOLD + && let Some((drag_value, drag_listener)) = drag_listener.take() + { + *clicked_state.borrow_mut() = ElementClickedState::default(); + let cursor_offset = event.position - hitbox.origin; + let drag = + (drag_listener)(drag_value.as_ref(), cursor_offset, window, cx); + cx.active_drag = Some(AnyDrag { + view: drag, + value: drag_value, + cursor_offset, + cursor_style: drag_cursor_style, + }); + pending_mouse_down.take(); + window.refresh(); + cx.stop_propagation(); } } }); @@ -2277,7 +2274,7 @@ impl Interactivity { window.on_mouse_event(move |_: &MouseDownEvent, phase, window, _cx| { if phase == DispatchPhase::Bubble && !window.default_prevented() { let group_hovered = active_group_hitbox - .map_or(false, |group_hitbox_id| group_hitbox_id.is_hovered(window)); + .is_some_and(|group_hitbox_id| group_hitbox_id.is_hovered(window)); let element_hovered = hitbox.is_hovered(window); if group_hovered || element_hovered { *active_state.borrow_mut() = ElementClickedState { @@ -2423,33 +2420,32 @@ impl Interactivity { style.refine(&self.base_style); if let Some(focus_handle) = self.tracked_focus_handle.as_ref() { - if let Some(in_focus_style) = self.in_focus_style.as_ref() { - if focus_handle.within_focused(window, cx) { - style.refine(in_focus_style); - } + if let Some(in_focus_style) = self.in_focus_style.as_ref() + && focus_handle.within_focused(window, cx) + { + style.refine(in_focus_style); } - if let Some(focus_style) = self.focus_style.as_ref() { - if focus_handle.is_focused(window) { - style.refine(focus_style); - } + if let Some(focus_style) = self.focus_style.as_ref() + && focus_handle.is_focused(window) + { + style.refine(focus_style); } } if let Some(hitbox) = hitbox { if !cx.has_active_drag() { - if let Some(group_hover) = self.group_hover_style.as_ref() { - if let Some(group_hitbox_id) = GroupHitboxes::get(&group_hover.group, cx) { - if group_hitbox_id.is_hovered(window) { - style.refine(&group_hover.style); - } - } + if let Some(group_hover) = self.group_hover_style.as_ref() + && let Some(group_hitbox_id) = GroupHitboxes::get(&group_hover.group, cx) + && group_hitbox_id.is_hovered(window) + { + style.refine(&group_hover.style); } - if let Some(hover_style) = self.hover_style.as_ref() { - if hitbox.is_hovered(window) { - style.refine(hover_style); - } + if let Some(hover_style) = self.hover_style.as_ref() + && hitbox.is_hovered(window) + { + style.refine(hover_style); } } @@ -2463,12 +2459,10 @@ impl Interactivity { for (state_type, group_drag_style) in &self.group_drag_over_styles { if let Some(group_hitbox_id) = GroupHitboxes::get(&group_drag_style.group, cx) + && *state_type == drag.value.as_ref().type_id() + && group_hitbox_id.is_hovered(window) { - if *state_type == drag.value.as_ref().type_id() - && group_hitbox_id.is_hovered(window) - { - style.refine(&group_drag_style.style); - } + style.refine(&group_drag_style.style); } } @@ -2490,16 +2484,16 @@ impl Interactivity { .clicked_state .get_or_insert_with(Default::default) .borrow(); - if clicked_state.group { - if let Some(group) = self.group_active_style.as_ref() { - style.refine(&group.style) - } + if clicked_state.group + && let Some(group) = self.group_active_style.as_ref() + { + style.refine(&group.style) } - if let Some(active_style) = 
self.active_style.as_ref() { - if clicked_state.element { - style.refine(active_style) - } + if let Some(active_style) = self.active_style.as_ref() + && clicked_state.element + { + style.refine(active_style) } } @@ -2620,7 +2614,7 @@ pub(crate) fn register_tooltip_mouse_handlers( window.on_mouse_event({ let active_tooltip = active_tooltip.clone(); move |_: &MouseDownEvent, _phase, window: &mut Window, _cx| { - if !tooltip_id.map_or(false, |tooltip_id| tooltip_id.is_hovered(window)) { + if !tooltip_id.is_some_and(|tooltip_id| tooltip_id.is_hovered(window)) { clear_active_tooltip_if_not_hoverable(&active_tooltip, window); } } @@ -2629,7 +2623,7 @@ pub(crate) fn register_tooltip_mouse_handlers( window.on_mouse_event({ let active_tooltip = active_tooltip.clone(); move |_: &ScrollWheelEvent, _phase, window: &mut Window, _cx| { - if !tooltip_id.map_or(false, |tooltip_id| tooltip_id.is_hovered(window)) { + if !tooltip_id.is_some_and(|tooltip_id| tooltip_id.is_hovered(window)) { clear_active_tooltip_if_not_hoverable(&active_tooltip, window); } } @@ -2785,7 +2779,7 @@ fn handle_tooltip_check_visible_and_update( match action { Action::None => {} - Action::Hide => clear_active_tooltip(&active_tooltip, window), + Action::Hide => clear_active_tooltip(active_tooltip, window), Action::ScheduleHide(tooltip) => { let delayed_hide_task = window.spawn(cx, { let active_tooltip = active_tooltip.clone(); diff --git a/crates/gpui/src/elements/image_cache.rs b/crates/gpui/src/elements/image_cache.rs index e7bdeaf9eb4d26913718a9b235cee4fcb0ca85ff..ee1436134a30f70e7015ab1c86f60733e60e9164 100644 --- a/crates/gpui/src/elements/image_cache.rs +++ b/crates/gpui/src/elements/image_cache.rs @@ -64,7 +64,7 @@ mod any_image_cache { cx: &mut App, ) -> Option, ImageCacheError>> { let image_cache = image_cache.clone().downcast::().unwrap(); - return image_cache.update(cx, |image_cache, cx| image_cache.load(resource, window, cx)); + image_cache.update(cx, |image_cache, cx| image_cache.load(resource, window, cx)) } } @@ -297,10 +297,10 @@ impl RetainAllImageCache { /// Remove the image from the cache by the given source. 
pub fn remove(&mut self, source: &Resource, window: &mut Window, cx: &mut App) { let hash = hash(source); - if let Some(mut item) = self.0.remove(&hash) { - if let Some(Ok(image)) = item.get() { - cx.drop_image(image, Some(window)); - } + if let Some(mut item) = self.0.remove(&hash) + && let Some(Ok(image)) = item.get() + { + cx.drop_image(image, Some(window)); } } diff --git a/crates/gpui/src/elements/img.rs b/crates/gpui/src/elements/img.rs index 993b319b697ece386ad8af6d6164c1b85bf3a1c7..893860d7e1b781144b2d8de06ae2135420854ed7 100644 --- a/crates/gpui/src/elements/img.rs +++ b/crates/gpui/src/elements/img.rs @@ -379,13 +379,12 @@ impl Element for Img { None => { if let Some(state) = &mut state { if let Some((started_loading, _)) = state.started_loading { - if started_loading.elapsed() > LOADING_DELAY { - if let Some(loading) = self.style.loading.as_ref() { - let mut element = loading(); - replacement_id = - Some(element.request_layout(window, cx)); - layout_state.replacement = Some(element); - } + if started_loading.elapsed() > LOADING_DELAY + && let Some(loading) = self.style.loading.as_ref() + { + let mut element = loading(); + replacement_id = Some(element.request_layout(window, cx)); + layout_state.replacement = Some(element); } } else { let current_view = window.current_view(); @@ -476,7 +475,7 @@ impl Element for Img { .paint_image( new_bounds, corner_radii, - data.clone(), + data, layout_state.frame_index, self.style.grayscale, ) diff --git a/crates/gpui/src/elements/list.rs b/crates/gpui/src/elements/list.rs index 39f38bdc69d6a5d4c9ce8c7c349707e906124cca..6758f4eee1e03c5b32f1dd924ed6d37fa31cf767 100644 --- a/crates/gpui/src/elements/list.rs +++ b/crates/gpui/src/elements/list.rs @@ -732,46 +732,44 @@ impl StateInner { item.element.prepaint_at(item_origin, window, cx); }); - if let Some(autoscroll_bounds) = window.take_autoscroll() { - if autoscroll { - if autoscroll_bounds.top() < bounds.top() { - return Err(ListOffset { - item_ix: item.index, - offset_in_item: autoscroll_bounds.top() - item_origin.y, - }); - } else if autoscroll_bounds.bottom() > bounds.bottom() { - let mut cursor = self.items.cursor::(&()); - cursor.seek(&Count(item.index), Bias::Right); - let mut height = bounds.size.height - padding.top - padding.bottom; - - // Account for the height of the element down until the autoscroll bottom. - height -= autoscroll_bounds.bottom() - item_origin.y; - - // Keep decreasing the scroll top until we fill all the available space. 
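The dominant refactor across these gpui hunks collapses nested `if let` / `if` blocks into a single `if let … && …` chain, as in the `RetainAllImageCache::remove` change above. A small self-contained sketch of the transformation, assuming a toolchain where let chains are stable (edition 2024, Rust 1.88+):

```rust
// Nested form that the diff replaces.
fn nested(item: Option<Result<u32, ()>>) -> Option<u32> {
    if let Some(entry) = item {
        if let Ok(value) = entry {
            if value > 10 {
                return Some(value);
            }
        }
    }
    None
}

// Flattened with a let chain: same behavior, one level of indentation.
fn flattened(item: Option<Result<u32, ()>>) -> Option<u32> {
    if let Some(entry) = item
        && let Ok(value) = entry
        && value > 10
    {
        return Some(value);
    }
    None
}

fn main() {
    assert_eq!(nested(Some(Ok(42))), flattened(Some(Ok(42))));
    assert_eq!(nested(Some(Ok(3))), flattened(Some(Ok(3))));
}
```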
- while height > Pixels::ZERO { - cursor.prev(); - let Some(item) = cursor.item() else { break }; - - let size = item.size().unwrap_or_else(|| { - let mut item = render_item(cursor.start().0, window, cx); - let item_available_size = size( - bounds.size.width.into(), - AvailableSpace::MinContent, - ); - item.layout_as_root(item_available_size, window, cx) - }); - height -= size.height; - } - - return Err(ListOffset { - item_ix: cursor.start().0, - offset_in_item: if height < Pixels::ZERO { - -height - } else { - Pixels::ZERO - }, + if let Some(autoscroll_bounds) = window.take_autoscroll() + && autoscroll + { + if autoscroll_bounds.top() < bounds.top() { + return Err(ListOffset { + item_ix: item.index, + offset_in_item: autoscroll_bounds.top() - item_origin.y, + }); + } else if autoscroll_bounds.bottom() > bounds.bottom() { + let mut cursor = self.items.cursor::(&()); + cursor.seek(&Count(item.index), Bias::Right); + let mut height = bounds.size.height - padding.top - padding.bottom; + + // Account for the height of the element down until the autoscroll bottom. + height -= autoscroll_bounds.bottom() - item_origin.y; + + // Keep decreasing the scroll top until we fill all the available space. + while height > Pixels::ZERO { + cursor.prev(); + let Some(item) = cursor.item() else { break }; + + let size = item.size().unwrap_or_else(|| { + let mut item = render_item(cursor.start().0, window, cx); + let item_available_size = + size(bounds.size.width.into(), AvailableSpace::MinContent); + item.layout_as_root(item_available_size, window, cx) }); + height -= size.height; } + + return Err(ListOffset { + item_ix: cursor.start().0, + offset_in_item: if height < Pixels::ZERO { + -height + } else { + Pixels::ZERO + }, + }); } } @@ -940,9 +938,10 @@ impl Element for List { let hitbox = window.insert_hitbox(bounds, HitboxBehavior::Normal); // If the width of the list has changed, invalidate all cached item heights - if state.last_layout_bounds.map_or(true, |last_bounds| { - last_bounds.size.width != bounds.size.width - }) { + if state + .last_layout_bounds + .is_none_or(|last_bounds| last_bounds.size.width != bounds.size.width) + { let new_items = SumTree::from_iter( state.items.iter().map(|item| ListItem::Unmeasured { focus_handle: item.focus_handle(), diff --git a/crates/gpui/src/elements/text.rs b/crates/gpui/src/elements/text.rs index 014f617e2cfc74755908368f57060aeaeb38aa74..b5e071279623611685ea744e38b072284e764e2a 100644 --- a/crates/gpui/src/elements/text.rs +++ b/crates/gpui/src/elements/text.rs @@ -326,7 +326,7 @@ impl TextLayout { vec![text_style.to_run(text.len())] }; - let layout_id = window.request_measured_layout(Default::default(), { + window.request_measured_layout(Default::default(), { let element_state = self.clone(); move |known_dimensions, available_space, window, cx| { @@ -356,12 +356,11 @@ impl TextLayout { (None, "".into()) }; - if let Some(text_layout) = element_state.0.borrow().as_ref() { - if text_layout.size.is_some() - && (wrap_width.is_none() || wrap_width == text_layout.wrap_width) - { - return text_layout.size.unwrap(); - } + if let Some(text_layout) = element_state.0.borrow().as_ref() + && text_layout.size.is_some() + && (wrap_width.is_none() || wrap_width == text_layout.wrap_width) + { + return text_layout.size.unwrap(); } let mut line_wrapper = cx.text_system().line_wrapper(text_style.font(), font_size); @@ -417,9 +416,7 @@ impl TextLayout { size } - }); - - layout_id + }) } fn prepaint(&self, bounds: Bounds, text: &str) { @@ -763,14 +760,13 @@ impl Element for 
InteractiveText { let mut interactive_state = interactive_state.unwrap_or_default(); if let Some(click_listener) = self.click_listener.take() { let mouse_position = window.mouse_position(); - if let Ok(ix) = text_layout.index_for_position(mouse_position) { - if self + if let Ok(ix) = text_layout.index_for_position(mouse_position) + && self .clickable_ranges .iter() .any(|range| range.contains(&ix)) - { - window.set_cursor_style(crate::CursorStyle::PointingHand, hitbox) - } + { + window.set_cursor_style(crate::CursorStyle::PointingHand, hitbox) } let text_layout = text_layout.clone(); @@ -803,13 +799,13 @@ impl Element for InteractiveText { } else { let hitbox = hitbox.clone(); window.on_mouse_event(move |event: &MouseDownEvent, phase, window, _| { - if phase == DispatchPhase::Bubble && hitbox.is_hovered(window) { - if let Ok(mouse_down_index) = + if phase == DispatchPhase::Bubble + && hitbox.is_hovered(window) + && let Ok(mouse_down_index) = text_layout.index_for_position(event.position) - { - mouse_down.set(Some(mouse_down_index)); - window.refresh(); - } + { + mouse_down.set(Some(mouse_down_index)); + window.refresh(); } }); } diff --git a/crates/gpui/src/geometry.rs b/crates/gpui/src/geometry.rs index 3d2d9cd9db693f8aa72f314da324ab2cb8e98789..87cabc8cd9f446fddcb5c98dafbdf956eb1efea2 100644 --- a/crates/gpui/src/geometry.rs +++ b/crates/gpui/src/geometry.rs @@ -9,12 +9,14 @@ use refineable::Refineable; use schemars::{JsonSchema, json_schema}; use serde::{Deserialize, Deserializer, Serialize, Serializer, de}; use std::borrow::Cow; +use std::ops::Range; use std::{ cmp::{self, PartialOrd}, fmt::{self, Display}, hash::Hash, ops::{Add, Div, Mul, MulAssign, Neg, Sub}, }; +use taffy::prelude::{TaffyGridLine, TaffyGridSpan}; use crate::{App, DisplayId}; @@ -1044,7 +1046,7 @@ where size: self.size.clone() + size( amount.left.clone() + amount.right.clone(), - amount.top.clone() + amount.bottom.clone(), + amount.top.clone() + amount.bottom, ), } } @@ -1157,10 +1159,10 @@ where /// Computes the space available within outer bounds. 
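The `request_measured_layout` change above, like several later hunks (`parse_pci_id`, `NoopTextSystem::font_id`, `app_path`), drops trailing `return` statements and pass-through bindings in favor of plain tail expressions, which is what clippy's `needless_return` and `let_and_return` lints suggest. A trivial before/after using a hypothetical, simplified function (the real `parse_pci_id` also attaches an `anyhow` context):

```rust
fn parse_hex_old(id: &str) -> Result<u32, std::num::ParseIntError> {
    let value = u32::from_str_radix(id, 16);
    return value;
}

fn parse_hex_new(id: &str) -> Result<u32, std::num::ParseIntError> {
    // The expression itself is the function's value.
    u32::from_str_radix(id, 16)
}

fn main() {
    assert_eq!(parse_hex_old("10de").unwrap(), parse_hex_new("10de").unwrap());
}
```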
pub fn space_within(&self, outer: &Self) -> Edges { Edges { - top: self.top().clone() - outer.top().clone(), - right: outer.right().clone() - self.right().clone(), - bottom: outer.bottom().clone() - self.bottom().clone(), - left: self.left().clone() - outer.left().clone(), + top: self.top() - outer.top(), + right: outer.right() - self.right(), + bottom: outer.bottom() - self.bottom(), + left: self.left() - outer.left(), } } } @@ -1639,7 +1641,7 @@ impl Bounds { } /// Convert the bounds from logical pixels to physical pixels - pub fn to_device_pixels(&self, factor: f32) -> Bounds { + pub fn to_device_pixels(self, factor: f32) -> Bounds { Bounds { origin: point( DevicePixels((self.origin.x.0 * factor).round() as i32), @@ -1710,7 +1712,7 @@ where top: self.top.clone() * rhs.top, right: self.right.clone() * rhs.right, bottom: self.bottom.clone() * rhs.bottom, - left: self.left.clone() * rhs.left, + left: self.left * rhs.left, } } } @@ -1955,7 +1957,7 @@ impl Edges { /// assert_eq!(edges_in_pixels.bottom, px(32.0)); // 2 rems /// assert_eq!(edges_in_pixels.left, px(50.0)); // 25% of parent width /// ``` - pub fn to_pixels(&self, parent_size: Size, rem_size: Pixels) -> Edges { + pub fn to_pixels(self, parent_size: Size, rem_size: Pixels) -> Edges { Edges { top: self.top.to_pixels(parent_size.height, rem_size), right: self.right.to_pixels(parent_size.width, rem_size), @@ -2025,7 +2027,7 @@ impl Edges { /// assert_eq!(edges_in_pixels.bottom, px(20.0)); // Already in pixels /// assert_eq!(edges_in_pixels.left, px(32.0)); // 2 rems converted to pixels /// ``` - pub fn to_pixels(&self, rem_size: Pixels) -> Edges { + pub fn to_pixels(self, rem_size: Pixels) -> Edges { Edges { top: self.top.to_pixels(rem_size), right: self.right.to_pixels(rem_size), @@ -2270,7 +2272,7 @@ impl Corners { /// assert_eq!(corners_in_pixels.bottom_right, Pixels(30.0)); /// assert_eq!(corners_in_pixels.bottom_left, Pixels(32.0)); // 2 rems converted to pixels /// ``` - pub fn to_pixels(&self, rem_size: Pixels) -> Corners { + pub fn to_pixels(self, rem_size: Pixels) -> Corners { Corners { top_left: self.top_left.to_pixels(rem_size), top_right: self.top_right.to_pixels(rem_size), @@ -2409,7 +2411,7 @@ where top_left: self.top_left.clone() * rhs.top_left, top_right: self.top_right.clone() * rhs.top_right, bottom_right: self.bottom_right.clone() * rhs.bottom_right, - bottom_left: self.bottom_left.clone() * rhs.bottom_left, + bottom_left: self.bottom_left * rhs.bottom_left, } } } @@ -2856,7 +2858,7 @@ impl DevicePixels { /// let total_bytes = pixels.to_bytes(bytes_per_pixel); /// assert_eq!(total_bytes, 40); // 10 pixels * 4 bytes/pixel = 40 bytes /// ``` - pub fn to_bytes(&self, bytes_per_pixel: u8) -> u32 { + pub fn to_bytes(self, bytes_per_pixel: u8) -> u32 { self.0 as u32 * bytes_per_pixel as u32 } } @@ -3071,8 +3073,8 @@ pub struct Rems(pub f32); impl Rems { /// Convert this Rem value to pixels. 
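The geometry conversions below (`to_device_pixels`, `to_pixels`, `to_bytes`, and later `CursorStyle::to_shape` / `to_icon_names`) switch their receivers from `&self` to `self`: these types are `Copy`, so taking them by value costs nothing and the bodies lose their `*self` dereferences and redundant `.clone()` calls. A simplified sketch with hypothetical stand-ins for `Rems` and `Pixels` (the real gpui types use operator overloading instead of raw `f32` math):

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
struct Pixels(f32);

#[derive(Clone, Copy, Debug, PartialEq)]
struct Rems(f32);

impl Rems {
    // By-value receiver: as cheap as `&self` for a Copy type, and the body no
    // longer needs `*self`.
    fn to_pixels(self, rem_size: Pixels) -> Pixels {
        Pixels(self.0 * rem_size.0)
    }
}

fn main() {
    assert_eq!(Rems(2.0).to_pixels(Pixels(16.0)), Pixels(32.0));
}
```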
- pub fn to_pixels(&self, rem_size: Pixels) -> Pixels { - *self * rem_size + pub fn to_pixels(self, rem_size: Pixels) -> Pixels { + self * rem_size } } @@ -3166,9 +3168,9 @@ impl AbsoluteLength { /// assert_eq!(length_in_pixels.to_pixels(rem_size), Pixels(42.0)); /// assert_eq!(length_in_rems.to_pixels(rem_size), Pixels(32.0)); /// ``` - pub fn to_pixels(&self, rem_size: Pixels) -> Pixels { + pub fn to_pixels(self, rem_size: Pixels) -> Pixels { match self { - AbsoluteLength::Pixels(pixels) => *pixels, + AbsoluteLength::Pixels(pixels) => pixels, AbsoluteLength::Rems(rems) => rems.to_pixels(rem_size), } } @@ -3182,10 +3184,10 @@ impl AbsoluteLength { /// # Returns /// /// Returns the `AbsoluteLength` as `Pixels`. - pub fn to_rems(&self, rem_size: Pixels) -> Rems { + pub fn to_rems(self, rem_size: Pixels) -> Rems { match self { AbsoluteLength::Pixels(pixels) => Rems(pixels.0 / rem_size.0), - AbsoluteLength::Rems(rems) => *rems, + AbsoluteLength::Rems(rems) => rems, } } } @@ -3313,12 +3315,12 @@ impl DefiniteLength { /// assert_eq!(length_in_rems.to_pixels(base_size, rem_size), Pixels(32.0)); /// assert_eq!(length_as_fraction.to_pixels(base_size, rem_size), Pixels(50.0)); /// ``` - pub fn to_pixels(&self, base_size: AbsoluteLength, rem_size: Pixels) -> Pixels { + pub fn to_pixels(self, base_size: AbsoluteLength, rem_size: Pixels) -> Pixels { match self { DefiniteLength::Absolute(size) => size.to_pixels(rem_size), DefiniteLength::Fraction(fraction) => match base_size { - AbsoluteLength::Pixels(px) => px * *fraction, - AbsoluteLength::Rems(rems) => rems * rem_size * *fraction, + AbsoluteLength::Pixels(px) => px * fraction, + AbsoluteLength::Rems(rems) => rems * rem_size * fraction, }, } } @@ -3608,6 +3610,37 @@ impl From<()> for Length { } } +/// A location in a grid layout. +#[derive(Clone, PartialEq, Debug, Serialize, Deserialize, JsonSchema, Default)] +pub struct GridLocation { + /// The rows this item uses within the grid. + pub row: Range, + /// The columns this item uses within the grid. + pub column: Range, +} + +/// The placement of an item within a grid layout's column or row. +#[derive(Clone, Copy, PartialEq, Debug, Serialize, Deserialize, JsonSchema, Default)] +pub enum GridPlacement { + /// The grid line index to place this item. + Line(i16), + /// The number of grid lines to span. + Span(u16), + /// Automatically determine the placement, equivalent to Span(1) + #[default] + Auto, +} + +impl From for taffy::GridPlacement { + fn from(placement: GridPlacement) -> Self { + match placement { + GridPlacement::Line(index) => taffy::GridPlacement::from_line_index(index), + GridPlacement::Span(span) => taffy::GridPlacement::from_span(span), + GridPlacement::Auto => taffy::GridPlacement::Auto, + } + } +} + /// Provides a trait for types that can calculate half of their value. /// /// The `Half` trait is used for types that can be evenly divided, returning a new instance of the same type diff --git a/crates/gpui/src/gpui.rs b/crates/gpui/src/gpui.rs index 09799eb910f0eeece17fd9975c3c13f6accd2df6..5e4b5fe6e9d221a2915e1f8234eb01e14c177a46 100644 --- a/crates/gpui/src/gpui.rs +++ b/crates/gpui/src/gpui.rs @@ -157,7 +157,7 @@ pub use taffy::{AvailableSpace, LayoutId}; #[cfg(any(test, feature = "test-support"))] pub use test::*; pub use text_system::*; -pub use util::arc_cow::ArcCow; +pub use util::{FutureExt, Timeout, arc_cow::ArcCow}; pub use view::*; pub use window::*; @@ -172,6 +172,10 @@ pub trait AppContext { type Result; /// Create a new entity in the app context. 
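The new `GridLocation` / `GridPlacement` types added in this geometry hunk mirror CSS grid placement: an item is pinned to a grid line, spans a number of tracks, or is placed automatically. A rough standalone sketch of how they compose; the range element type is elided in this patch text, so `Range<GridPlacement>` is an assumption here, and the serde/`JsonSchema` derives are omitted:

```rust
use std::ops::Range;

// Simplified stand-ins for the new gpui grid types.
#[derive(Clone, Copy, PartialEq, Debug, Default)]
enum GridPlacement {
    Line(i16),
    Span(u16),
    #[default]
    Auto,
}

#[derive(Clone, PartialEq, Debug)]
struct GridLocation {
    row: Range<GridPlacement>,
    column: Range<GridPlacement>,
}

fn main() {
    // An item pinned to row line 1 that spans two columns.
    let location = GridLocation {
        row: GridPlacement::Line(1)..GridPlacement::Auto,
        column: GridPlacement::Auto..GridPlacement::Span(2),
    };
    println!("{location:?}");
}
```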
+ #[expect( + clippy::wrong_self_convention, + reason = "`App::new` is an ubiquitous function for creating entities" + )] fn new( &mut self, build_entity: impl FnOnce(&mut Context) -> T, diff --git a/crates/gpui/src/inspector.rs b/crates/gpui/src/inspector.rs index 23c46edcc11ed36cfbe3ad110dc296af3e129784..9f86576a599845bb9e09760e8001333b9dea745d 100644 --- a/crates/gpui/src/inspector.rs +++ b/crates/gpui/src/inspector.rs @@ -164,7 +164,7 @@ mod conditional { if let Some(render_inspector) = cx .inspector_element_registry .renderers_by_type_id - .remove(&type_id) + .remove(type_id) { let mut element = (render_inspector)( active_element.id.clone(), diff --git a/crates/gpui/src/key_dispatch.rs b/crates/gpui/src/key_dispatch.rs index cc6ebb9b08db114c1aabbb2a58296fc3aa8a9949..95374e579fa5cc11d84c2ba7e9ec88f261d8d2b2 100644 --- a/crates/gpui/src/key_dispatch.rs +++ b/crates/gpui/src/key_dispatch.rs @@ -408,7 +408,7 @@ impl DispatchTree { keymap .bindings_for_action(action) .filter(|binding| { - Self::binding_matches_predicate_and_not_shadowed(&keymap, &binding, context_stack) + Self::binding_matches_predicate_and_not_shadowed(&keymap, binding, context_stack) }) .cloned() .collect() @@ -426,7 +426,7 @@ impl DispatchTree { .bindings_for_action(action) .rev() .find(|binding| { - Self::binding_matches_predicate_and_not_shadowed(&keymap, &binding, context_stack) + Self::binding_matches_predicate_and_not_shadowed(&keymap, binding, context_stack) }) .cloned() } @@ -458,7 +458,7 @@ impl DispatchTree { .keymap .borrow() .bindings_for_input(input, &context_stack); - return (bindings, partial, context_stack); + (bindings, partial, context_stack) } /// dispatch_key processes the keystroke @@ -611,9 +611,17 @@ impl DispatchTree { #[cfg(test)] mod tests { - use std::{cell::RefCell, rc::Rc}; - - use crate::{Action, ActionRegistry, DispatchTree, KeyBinding, KeyContext, Keymap}; + use crate::{ + self as gpui, Element, ElementId, GlobalElementId, InspectorElementId, LayoutId, Style, + }; + use core::panic; + use std::{cell::RefCell, ops::Range, rc::Rc}; + + use crate::{ + Action, ActionRegistry, App, Bounds, Context, DispatchTree, FocusHandle, InputHandler, + IntoElement, KeyBinding, KeyContext, Keymap, Pixels, Point, Render, TestAppContext, + UTF16Selection, Window, + }; #[derive(PartialEq, Eq)] struct TestAction; @@ -631,10 +639,7 @@ mod tests { } fn partial_eq(&self, action: &dyn Action) -> bool { - action - .as_any() - .downcast_ref::() - .map_or(false, |a| self == a) + action.as_any().downcast_ref::() == Some(self) } fn boxed_clone(&self) -> std::boxed::Box { @@ -674,4 +679,165 @@ mod tests { assert!(keybinding[0].action.partial_eq(&TestAction)) } + + #[crate::test] + fn test_input_handler_pending(cx: &mut TestAppContext) { + #[derive(Clone)] + struct CustomElement { + focus_handle: FocusHandle, + text: Rc>, + } + impl CustomElement { + fn new(cx: &mut Context) -> Self { + Self { + focus_handle: cx.focus_handle(), + text: Rc::default(), + } + } + } + impl Element for CustomElement { + type RequestLayoutState = (); + + type PrepaintState = (); + + fn id(&self) -> Option { + Some("custom".into()) + } + fn source_location(&self) -> Option<&'static panic::Location<'static>> { + None + } + fn request_layout( + &mut self, + _: Option<&GlobalElementId>, + _: Option<&InspectorElementId>, + window: &mut Window, + cx: &mut App, + ) -> (LayoutId, Self::RequestLayoutState) { + (window.request_layout(Style::default(), [], cx), ()) + } + fn prepaint( + &mut self, + _: Option<&GlobalElementId>, + _: 
Option<&InspectorElementId>, + _: Bounds, + _: &mut Self::RequestLayoutState, + window: &mut Window, + cx: &mut App, + ) -> Self::PrepaintState { + window.set_focus_handle(&self.focus_handle, cx); + } + fn paint( + &mut self, + _: Option<&GlobalElementId>, + _: Option<&InspectorElementId>, + _: Bounds, + _: &mut Self::RequestLayoutState, + _: &mut Self::PrepaintState, + window: &mut Window, + cx: &mut App, + ) { + let mut key_context = KeyContext::default(); + key_context.add("Terminal"); + window.set_key_context(key_context); + window.handle_input(&self.focus_handle, self.clone(), cx); + window.on_action(std::any::TypeId::of::(), |_, _, _, _| {}); + } + } + impl IntoElement for CustomElement { + type Element = Self; + + fn into_element(self) -> Self::Element { + self + } + } + + impl InputHandler for CustomElement { + fn selected_text_range( + &mut self, + _: bool, + _: &mut Window, + _: &mut App, + ) -> Option { + None + } + + fn marked_text_range(&mut self, _: &mut Window, _: &mut App) -> Option> { + None + } + + fn text_for_range( + &mut self, + _: Range, + _: &mut Option>, + _: &mut Window, + _: &mut App, + ) -> Option { + None + } + + fn replace_text_in_range( + &mut self, + replacement_range: Option>, + text: &str, + _: &mut Window, + _: &mut App, + ) { + if replacement_range.is_some() { + unimplemented!() + } + self.text.borrow_mut().push_str(text) + } + + fn replace_and_mark_text_in_range( + &mut self, + replacement_range: Option>, + new_text: &str, + _: Option>, + _: &mut Window, + _: &mut App, + ) { + if replacement_range.is_some() { + unimplemented!() + } + self.text.borrow_mut().push_str(new_text) + } + + fn unmark_text(&mut self, _: &mut Window, _: &mut App) {} + + fn bounds_for_range( + &mut self, + _: Range, + _: &mut Window, + _: &mut App, + ) -> Option> { + None + } + + fn character_index_for_point( + &mut self, + _: Point, + _: &mut Window, + _: &mut App, + ) -> Option { + None + } + } + impl Render for CustomElement { + fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { + self.clone() + } + } + + cx.update(|cx| { + cx.bind_keys([KeyBinding::new("ctrl-b", TestAction, Some("Terminal"))]); + cx.bind_keys([KeyBinding::new("ctrl-b h", TestAction, Some("Terminal"))]); + }); + let (test, cx) = cx.add_window_view(|_, cx| CustomElement::new(cx)); + cx.update(|window, cx| { + window.focus(&test.read(cx).focus_handle); + window.activate_window(); + }); + cx.simulate_keystrokes("ctrl-b ["); + test.update(cx, |test, _| assert_eq!(test.text.borrow().as_str(), "[")) + } } diff --git a/crates/gpui/src/keymap.rs b/crates/gpui/src/keymap.rs index 83d7479a04423d249a2be69c69756211eb9d485d..757205fcc3d2a886744582769951b94abf754352 100644 --- a/crates/gpui/src/keymap.rs +++ b/crates/gpui/src/keymap.rs @@ -148,7 +148,7 @@ impl Keymap { let mut pending_bindings = SmallVec::<[(BindingIndex, &KeyBinding); 1]>::new(); for (ix, binding) in self.bindings().enumerate().rev() { - let Some(depth) = self.binding_enabled(binding, &context_stack) else { + let Some(depth) = self.binding_enabled(binding, context_stack) else { continue; }; let Some(pending) = binding.match_keystrokes(input) else { @@ -264,7 +264,7 @@ mod tests { ]; let mut keymap = Keymap::default(); - keymap.add_bindings(bindings.clone()); + keymap.add_bindings(bindings); let (result, pending) = keymap.bindings_for_input( &[Keystroke::parse("ctrl-a").unwrap()], @@ -290,7 +290,7 @@ mod tests { ]; let mut keymap = Keymap::default(); - keymap.add_bindings(bindings.clone()); + keymap.add_bindings(bindings); // binding 
is only enabled in a specific context assert!( @@ -344,7 +344,7 @@ mod tests { ]; let mut keymap = Keymap::default(); - keymap.add_bindings(bindings.clone()); + keymap.add_bindings(bindings); let space = || Keystroke::parse("space").unwrap(); let w = || Keystroke::parse("w").unwrap(); @@ -364,29 +364,29 @@ mod tests { // Ensure `space` results in pending input on the workspace, but not editor let space_workspace = keymap.bindings_for_input(&[space()], &workspace_context()); assert!(space_workspace.0.is_empty()); - assert_eq!(space_workspace.1, true); + assert!(space_workspace.1); let space_editor = keymap.bindings_for_input(&[space()], &editor_workspace_context()); assert!(space_editor.0.is_empty()); - assert_eq!(space_editor.1, false); + assert!(!space_editor.1); // Ensure `space w` results in pending input on the workspace, but not editor let space_w_workspace = keymap.bindings_for_input(&space_w, &workspace_context()); assert!(space_w_workspace.0.is_empty()); - assert_eq!(space_w_workspace.1, true); + assert!(space_w_workspace.1); let space_w_editor = keymap.bindings_for_input(&space_w, &editor_workspace_context()); assert!(space_w_editor.0.is_empty()); - assert_eq!(space_w_editor.1, false); + assert!(!space_w_editor.1); // Ensure `space w w` results in the binding in the workspace, but not in the editor let space_w_w_workspace = keymap.bindings_for_input(&space_w_w, &workspace_context()); assert!(!space_w_w_workspace.0.is_empty()); - assert_eq!(space_w_w_workspace.1, false); + assert!(!space_w_w_workspace.1); let space_w_w_editor = keymap.bindings_for_input(&space_w_w, &editor_workspace_context()); assert!(space_w_w_editor.0.is_empty()); - assert_eq!(space_w_w_editor.1, false); + assert!(!space_w_w_editor.1); // Now test what happens if we have another binding defined AFTER the NoAction // that should result in pending @@ -396,11 +396,11 @@ mod tests { KeyBinding::new("space w x", ActionAlpha {}, Some("editor")), ]; let mut keymap = Keymap::default(); - keymap.add_bindings(bindings.clone()); + keymap.add_bindings(bindings); let space_editor = keymap.bindings_for_input(&[space()], &editor_workspace_context()); assert!(space_editor.0.is_empty()); - assert_eq!(space_editor.1, true); + assert!(space_editor.1); // Now test what happens if we have another binding defined BEFORE the NoAction // that should result in pending @@ -410,11 +410,11 @@ mod tests { KeyBinding::new("space w w", NoAction {}, Some("editor")), ]; let mut keymap = Keymap::default(); - keymap.add_bindings(bindings.clone()); + keymap.add_bindings(bindings); let space_editor = keymap.bindings_for_input(&[space()], &editor_workspace_context()); assert!(space_editor.0.is_empty()); - assert_eq!(space_editor.1, true); + assert!(space_editor.1); // Now test what happens if we have another binding defined at a higher context // that should result in pending @@ -424,11 +424,11 @@ mod tests { KeyBinding::new("space w w", NoAction {}, Some("editor")), ]; let mut keymap = Keymap::default(); - keymap.add_bindings(bindings.clone()); + keymap.add_bindings(bindings); let space_editor = keymap.bindings_for_input(&[space()], &editor_workspace_context()); assert!(space_editor.0.is_empty()); - assert_eq!(space_editor.1, true); + assert!(space_editor.1); } #[test] @@ -439,7 +439,7 @@ mod tests { ]; let mut keymap = Keymap::default(); - keymap.add_bindings(bindings.clone()); + keymap.add_bindings(bindings); // Ensure `space` results in pending input on the workspace, but not editor let (result, pending) = keymap.bindings_for_input( @@ -447,7 
+447,7 @@ mod tests { &[KeyContext::parse("editor").unwrap()], ); assert!(result.is_empty()); - assert_eq!(pending, true); + assert!(pending); let bindings = [ KeyBinding::new("ctrl-w left", ActionAlpha {}, Some("editor")), @@ -455,7 +455,7 @@ mod tests { ]; let mut keymap = Keymap::default(); - keymap.add_bindings(bindings.clone()); + keymap.add_bindings(bindings); // Ensure `space` results in pending input on the workspace, but not editor let (result, pending) = keymap.bindings_for_input( @@ -463,7 +463,7 @@ mod tests { &[KeyContext::parse("editor").unwrap()], ); assert_eq!(result.len(), 1); - assert_eq!(pending, false); + assert!(!pending); } #[test] @@ -474,7 +474,7 @@ mod tests { ]; let mut keymap = Keymap::default(); - keymap.add_bindings(bindings.clone()); + keymap.add_bindings(bindings); // Ensure `space` results in pending input on the workspace, but not editor let (result, pending) = keymap.bindings_for_input( @@ -482,7 +482,7 @@ mod tests { &[KeyContext::parse("editor").unwrap()], ); assert!(result.is_empty()); - assert_eq!(pending, false); + assert!(!pending); } #[test] @@ -494,7 +494,7 @@ mod tests { ]; let mut keymap = Keymap::default(); - keymap.add_bindings(bindings.clone()); + keymap.add_bindings(bindings); // Ensure `space` results in pending input on the workspace, but not editor let (result, pending) = keymap.bindings_for_input( @@ -505,7 +505,7 @@ mod tests { ], ); assert_eq!(result.len(), 1); - assert_eq!(pending, false); + assert!(!pending); } #[test] @@ -516,7 +516,7 @@ mod tests { ]; let mut keymap = Keymap::default(); - keymap.add_bindings(bindings.clone()); + keymap.add_bindings(bindings); // Ensure `space` results in pending input on the workspace, but not editor let (result, pending) = keymap.bindings_for_input( @@ -527,7 +527,7 @@ mod tests { ], ); assert_eq!(result.len(), 0); - assert_eq!(pending, false); + assert!(!pending); } #[test] @@ -537,7 +537,7 @@ mod tests { KeyBinding::new("ctrl-x 0", ActionAlpha, Some("Workspace")), ]; let mut keymap = Keymap::default(); - keymap.add_bindings(bindings.clone()); + keymap.add_bindings(bindings); let matched = keymap.bindings_for_input( &[Keystroke::parse("ctrl-x")].map(Result::unwrap), @@ -560,7 +560,7 @@ mod tests { KeyBinding::new("ctrl-x 0", NoAction, Some("Workspace")), ]; let mut keymap = Keymap::default(); - keymap.add_bindings(bindings.clone()); + keymap.add_bindings(bindings); let matched = keymap.bindings_for_input( &[Keystroke::parse("ctrl-x")].map(Result::unwrap), @@ -579,7 +579,7 @@ mod tests { KeyBinding::new("ctrl-x 0", NoAction, Some("vim_mode == normal")), ]; let mut keymap = Keymap::default(); - keymap.add_bindings(bindings.clone()); + keymap.add_bindings(bindings); let matched = keymap.bindings_for_input( &[Keystroke::parse("ctrl-x")].map(Result::unwrap), @@ -602,7 +602,7 @@ mod tests { KeyBinding::new("ctrl-x", ActionBeta, Some("vim_mode == normal")), ]; let mut keymap = Keymap::default(); - keymap.add_bindings(bindings.clone()); + keymap.add_bindings(bindings); let matched = keymap.bindings_for_input( &[Keystroke::parse("ctrl-x")].map(Result::unwrap), @@ -629,7 +629,7 @@ mod tests { ]; let mut keymap = Keymap::default(); - keymap.add_bindings(bindings.clone()); + keymap.add_bindings(bindings); assert_bindings(&keymap, &ActionAlpha {}, &["ctrl-a"]); assert_bindings(&keymap, &ActionBeta {}, &[]); diff --git a/crates/gpui/src/keymap/binding.rs b/crates/gpui/src/keymap/binding.rs index 1d3f612c5bef76d75cb1bd8ee9d9c686190c3fd7..729498d153b62b3e250421c82b4bdc05e6c0030f 100644 --- 
a/crates/gpui/src/keymap/binding.rs +++ b/crates/gpui/src/keymap/binding.rs @@ -30,11 +30,8 @@ impl Clone for KeyBinding { impl KeyBinding { /// Construct a new keybinding from the given data. Panics on parse error. pub fn new(keystrokes: &str, action: A, context: Option<&str>) -> Self { - let context_predicate = if let Some(context) = context { - Some(KeyBindingContextPredicate::parse(context).unwrap().into()) - } else { - None - }; + let context_predicate = + context.map(|context| KeyBindingContextPredicate::parse(context).unwrap().into()); Self::load(keystrokes, Box::new(action), context_predicate, None, None).unwrap() } @@ -53,10 +50,10 @@ impl KeyBinding { if let Some(equivalents) = key_equivalents { for keystroke in keystrokes.iter_mut() { - if keystroke.key.chars().count() == 1 { - if let Some(key) = equivalents.get(&keystroke.key.chars().next().unwrap()) { - keystroke.key = key.to_string(); - } + if keystroke.key.chars().count() == 1 + && let Some(key) = equivalents.get(&keystroke.key.chars().next().unwrap()) + { + keystroke.key = key.to_string(); } } } diff --git a/crates/gpui/src/keymap/context.rs b/crates/gpui/src/keymap/context.rs index f4b878ae772ece481f9e7e7e2241c476dd8e4153..960bd1752fe8c1527b9c593658e429af4cd61029 100644 --- a/crates/gpui/src/keymap/context.rs +++ b/crates/gpui/src/keymap/context.rs @@ -287,7 +287,7 @@ impl KeyBindingContextPredicate { return false; } } - return true; + true } // Workspace > Pane > Editor // @@ -305,7 +305,7 @@ impl KeyBindingContextPredicate { return true; } } - return false; + false } Self::And(left, right) => { left.eval_inner(contexts, all_contexts) && right.eval_inner(contexts, all_contexts) @@ -461,6 +461,8 @@ fn skip_whitespace(source: &str) -> &str { #[cfg(test)] mod tests { + use core::slice; + use super::*; use crate as gpui; use KeyBindingContextPredicate::*; @@ -666,20 +668,16 @@ mod tests { let contexts = vec![other_context.clone(), child_context.clone()]; assert!(!predicate.eval(&contexts)); - let contexts = vec![ - parent_context.clone(), - other_context.clone(), - child_context.clone(), - ]; + let contexts = vec![parent_context.clone(), other_context, child_context.clone()]; assert!(predicate.eval(&contexts)); assert!(!predicate.eval(&[])); - assert!(!predicate.eval(&[child_context.clone()])); + assert!(!predicate.eval(slice::from_ref(&child_context))); assert!(!predicate.eval(&[parent_context])); let zany_predicate = KeyBindingContextPredicate::parse("child > child").unwrap(); - assert!(!zany_predicate.eval(&[child_context.clone()])); - assert!(zany_predicate.eval(&[child_context.clone(), child_context.clone()])); + assert!(!zany_predicate.eval(slice::from_ref(&child_context))); + assert!(zany_predicate.eval(&[child_context.clone(), child_context])); } #[test] @@ -690,13 +688,13 @@ mod tests { let parent_context = KeyContext::try_from("parent").unwrap(); let child_context = KeyContext::try_from("child").unwrap(); - assert!(not_predicate.eval(&[workspace_context.clone()])); - assert!(!not_predicate.eval(&[editor_context.clone()])); + assert!(not_predicate.eval(slice::from_ref(&workspace_context))); + assert!(!not_predicate.eval(slice::from_ref(&editor_context))); assert!(!not_predicate.eval(&[editor_context.clone(), workspace_context.clone()])); assert!(!not_predicate.eval(&[workspace_context.clone(), editor_context.clone()])); let complex_not = KeyBindingContextPredicate::parse("!editor && workspace").unwrap(); - assert!(complex_not.eval(&[workspace_context.clone()])); + 
assert!(complex_not.eval(slice::from_ref(&workspace_context))); assert!(!complex_not.eval(&[editor_context.clone(), workspace_context.clone()])); let not_mode_predicate = KeyBindingContextPredicate::parse("!(mode == full)").unwrap(); @@ -709,18 +707,18 @@ mod tests { assert!(not_mode_predicate.eval(&[other_mode_context])); let not_descendant = KeyBindingContextPredicate::parse("!(parent > child)").unwrap(); - assert!(not_descendant.eval(&[parent_context.clone()])); - assert!(not_descendant.eval(&[child_context.clone()])); + assert!(not_descendant.eval(slice::from_ref(&parent_context))); + assert!(not_descendant.eval(slice::from_ref(&child_context))); assert!(!not_descendant.eval(&[parent_context.clone(), child_context.clone()])); let not_descendant = KeyBindingContextPredicate::parse("parent > !child").unwrap(); - assert!(!not_descendant.eval(&[parent_context.clone()])); - assert!(!not_descendant.eval(&[child_context.clone()])); - assert!(!not_descendant.eval(&[parent_context.clone(), child_context.clone()])); + assert!(!not_descendant.eval(slice::from_ref(&parent_context))); + assert!(!not_descendant.eval(slice::from_ref(&child_context))); + assert!(!not_descendant.eval(&[parent_context, child_context])); let double_not = KeyBindingContextPredicate::parse("!!editor").unwrap(); - assert!(double_not.eval(&[editor_context.clone()])); - assert!(!double_not.eval(&[workspace_context.clone()])); + assert!(double_not.eval(slice::from_ref(&editor_context))); + assert!(!double_not.eval(slice::from_ref(&workspace_context))); // Test complex descendant cases let workspace_context = KeyContext::try_from("Workspace").unwrap(); @@ -754,9 +752,9 @@ mod tests { // !Workspace - shouldn't match when Workspace is in the context let not_workspace = KeyBindingContextPredicate::parse("!Workspace").unwrap(); - assert!(!not_workspace.eval(&[workspace_context.clone()])); - assert!(not_workspace.eval(&[pane_context.clone()])); - assert!(not_workspace.eval(&[editor_context.clone()])); + assert!(!not_workspace.eval(slice::from_ref(&workspace_context))); + assert!(not_workspace.eval(slice::from_ref(&pane_context))); + assert!(not_workspace.eval(slice::from_ref(&editor_context))); assert!(!not_workspace.eval(&workspace_pane_editor)); } } diff --git a/crates/gpui/src/path_builder.rs b/crates/gpui/src/path_builder.rs index 6c8cfddd523c4d56c81ebcbbf1437b5cc418d73c..38903ea5885a4fbd0ed1454046a9021aa572d6e3 100644 --- a/crates/gpui/src/path_builder.rs +++ b/crates/gpui/src/path_builder.rs @@ -278,7 +278,7 @@ impl PathBuilder { options: &StrokeOptions, ) -> Result, Error> { let path = if let Some(dash_array) = dash_array { - let measurements = lyon::algorithms::measure::PathMeasurements::from_path(&path, 0.01); + let measurements = lyon::algorithms::measure::PathMeasurements::from_path(path, 0.01); let mut sampler = measurements .create_sampler(path, lyon::algorithms::measure::SampleType::Normalized); let mut builder = lyon::path::Path::builder(); diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index b495d70dfdd3594a27ed3c1793e7e0ac4e7e0b4a..4d2feeaf1d041245b110bf25674fd18145a9a7ee 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -220,7 +220,11 @@ pub(crate) trait Platform: 'static { &self, options: PathPromptOptions, ) -> oneshot::Receiver>>>; - fn prompt_for_new_path(&self, directory: &Path) -> oneshot::Receiver>>; + fn prompt_for_new_path( + &self, + directory: &Path, + suggested_name: Option<&str>, + ) -> oneshot::Receiver>>; fn can_select_mixed_files_and_dirs(&self) -> 
bool; fn reveal_path(&self, path: &Path); fn open_with_system(&self, path: &Path); @@ -588,7 +592,7 @@ impl PlatformTextSystem for NoopTextSystem { } fn font_id(&self, _descriptor: &Font) -> Result { - return Ok(FontId(1)); + Ok(FontId(1)) } fn font_metrics(&self, _font_id: FontId) -> FontMetrics { @@ -669,7 +673,7 @@ impl PlatformTextSystem for NoopTextSystem { } } let mut runs = Vec::default(); - if glyphs.len() > 0 { + if !glyphs.is_empty() { runs.push(ShapedRun { font_id: FontId(0), glyphs, @@ -1274,7 +1278,7 @@ pub enum WindowBackgroundAppearance { } /// The options that can be configured for a file dialog prompt -#[derive(Copy, Clone, Debug)] +#[derive(Clone, Debug)] pub struct PathPromptOptions { /// Should the prompt allow files to be selected? pub files: bool, @@ -1282,6 +1286,8 @@ pub struct PathPromptOptions { pub directories: bool, /// Should the prompt allow multiple files to be selected? pub multiple: bool, + /// The prompt to show to a user when selecting a path + pub prompt: Option, } /// What kind of prompt styling to show @@ -1502,7 +1508,7 @@ impl ClipboardItem { for entry in self.entries.iter() { if let ClipboardEntry::String(ClipboardString { text, metadata: _ }) = entry { - answer.push_str(&text); + answer.push_str(text); any_entries = true; } } diff --git a/crates/gpui/src/platform/app_menu.rs b/crates/gpui/src/platform/app_menu.rs index 2815cbdd7fe40cc5ec465969595b961ee42ebc69..4069fee7268c18bd5dbdfb3f3fb8a73d2f7f6e92 100644 --- a/crates/gpui/src/platform/app_menu.rs +++ b/crates/gpui/src/platform/app_menu.rs @@ -20,6 +20,34 @@ impl Menu { } } +/// OS menus are menus that are recognized by the operating system +/// This allows the operating system to provide specialized items for +/// these menus +pub struct OsMenu { + /// The name of the menu + pub name: SharedString, + + /// The type of menu + pub menu_type: SystemMenuType, +} + +impl OsMenu { + /// Create an OwnedOsMenu from this OsMenu + pub fn owned(self) -> OwnedOsMenu { + OwnedOsMenu { + name: self.name.to_string().into(), + menu_type: self.menu_type, + } + } +} + +/// The type of system menu +#[derive(Copy, Clone, Eq, PartialEq)] +pub enum SystemMenuType { + /// The 'Services' menu in the Application menu on macOS + Services, +} + /// The different kinds of items that can be in a menu pub enum MenuItem { /// A separator between items @@ -28,6 +56,9 @@ pub enum MenuItem { /// A submenu Submenu(Menu), + /// A menu, managed by the system (for example, the Services menu on macOS) + SystemMenu(OsMenu), + /// An action that can be performed Action { /// The name of this menu item @@ -53,6 +84,14 @@ impl MenuItem { Self::Submenu(menu) } + /// Creates a new submenu that is populated by the OS + pub fn os_submenu(name: impl Into, menu_type: SystemMenuType) -> Self { + Self::SystemMenu(OsMenu { + name: name.into(), + menu_type, + }) + } + /// Creates a new menu item that invokes an action pub fn action(name: impl Into, action: impl Action) -> Self { Self::Action { @@ -89,10 +128,23 @@ impl MenuItem { action, os_action, }, + MenuItem::SystemMenu(os_menu) => OwnedMenuItem::SystemMenu(os_menu.owned()), } } } +/// OS menus are menus that are recognized by the operating system +/// This allows the operating system to provide specialized items for +/// these menus +#[derive(Clone)] +pub struct OwnedOsMenu { + /// The name of the menu + pub name: SharedString, + + /// The type of menu + pub menu_type: SystemMenuType, +} + /// A menu of the application, either a main menu or a submenu #[derive(Clone)] pub struct OwnedMenu { 
@@ -111,6 +163,9 @@ pub enum OwnedMenuItem { /// A submenu Submenu(OwnedMenu), + /// A menu, managed by the system (for example, the Services menu on macOS) + SystemMenu(OwnedOsMenu), + /// An action that can be performed Action { /// The name of this menu item @@ -139,6 +194,7 @@ impl Clone for OwnedMenuItem { action: action.boxed_clone(), os_action: *os_action, }, + OwnedMenuItem::SystemMenu(os_menu) => OwnedMenuItem::SystemMenu(os_menu.clone()), } } } diff --git a/crates/gpui/src/platform/blade/blade_context.rs b/crates/gpui/src/platform/blade/blade_context.rs index 48872f16198a4ed2d1fc8c2a0b1cbce3eb0de477..12c68a1e70188d3ed2ab425b5abc1bac0dfe3a19 100644 --- a/crates/gpui/src/platform/blade/blade_context.rs +++ b/crates/gpui/src/platform/blade/blade_context.rs @@ -49,7 +49,7 @@ fn parse_pci_id(id: &str) -> anyhow::Result { "Expected a 4 digit PCI ID in hexadecimal format" ); - return u32::from_str_radix(id, 16).context("parsing PCI ID as hex"); + u32::from_str_radix(id, 16).context("parsing PCI ID as hex") } #[cfg(test)] diff --git a/crates/gpui/src/platform/blade/blade_renderer.rs b/crates/gpui/src/platform/blade/blade_renderer.rs index 46d3c16c72a9c10c0e686aff425fcc236c253ce7..cc1df7748ba6b7947ab53a86baa8ab31644ac05d 100644 --- a/crates/gpui/src/platform/blade/blade_renderer.rs +++ b/crates/gpui/src/platform/blade/blade_renderer.rs @@ -434,24 +434,24 @@ impl BladeRenderer { } fn wait_for_gpu(&mut self) { - if let Some(last_sp) = self.last_sync_point.take() { - if !self.gpu.wait_for(&last_sp, MAX_FRAME_TIME_MS) { - log::error!("GPU hung"); - #[cfg(target_os = "linux")] - if self.gpu.device_information().driver_name == "radv" { - log::error!( - "there's a known bug with amdgpu/radv, try setting ZED_PATH_SAMPLE_COUNT=0 as a workaround" - ); - log::error!( - "if that helps you're running into https://github.com/zed-industries/zed/issues/26143" - ); - } + if let Some(last_sp) = self.last_sync_point.take() + && !self.gpu.wait_for(&last_sp, MAX_FRAME_TIME_MS) + { + log::error!("GPU hung"); + #[cfg(target_os = "linux")] + if self.gpu.device_information().driver_name == "radv" { log::error!( - "your device information is: {:?}", - self.gpu.device_information() + "there's a known bug with amdgpu/radv, try setting ZED_PATH_SAMPLE_COUNT=0 as a workaround" + ); + log::error!( + "if that helps you're running into https://github.com/zed-industries/zed/issues/26143" ); - while !self.gpu.wait_for(&last_sp, MAX_FRAME_TIME_MS) {} } + log::error!( + "your device information is: {:?}", + self.gpu.device_information() + ); + while !self.gpu.wait_for(&last_sp, MAX_FRAME_TIME_MS) {} } } diff --git a/crates/gpui/src/platform/blade/shaders.wgsl b/crates/gpui/src/platform/blade/shaders.wgsl index b1ffb1812effa24e674e0238fcc6ddef7dc0f882..95980b54fe4f25b3936d6b095219c5674211dd0a 100644 --- a/crates/gpui/src/platform/blade/shaders.wgsl +++ b/crates/gpui/src/platform/blade/shaders.wgsl @@ -1057,6 +1057,9 @@ fn vs_underline(@builtin(vertex_index) vertex_id: u32, @builtin(instance_index) @fragment fn fs_underline(input: UnderlineVarying) -> @location(0) vec4 { + const WAVE_FREQUENCY: f32 = 2.0; + const WAVE_HEIGHT_RATIO: f32 = 0.8; + // Alpha clip first, since we don't have `clip_distance`. 
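The `app_menu` additions above let a menu delegate its contents to the operating system (currently only the macOS Services menu via `SystemMenuType::Services`). A hedged usage sketch, not standalone: it assumes gpui re-exports `Menu`, `MenuItem`, and the new `SystemMenuType` at the crate root, and that `Menu`'s fields are `name` and `items` as in Zed's existing menu definitions.

```rust
use gpui::{Menu, MenuItem, SystemMenuType};

// Hypothetical application-menu fragment: "Services" is an OS-managed submenu
// rather than a hand-built one, so macOS can populate it.
fn app_menu() -> Menu {
    Menu {
        name: "MyApp".into(),
        items: vec![
            MenuItem::separator(),
            MenuItem::os_submenu("Services", SystemMenuType::Services),
            MenuItem::separator(),
        ],
    }
}
```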
if (any(input.clip_distances < vec4(0.0))) { return vec4(0.0); @@ -1069,9 +1072,11 @@ fn fs_underline(input: UnderlineVarying) -> @location(0) vec4 { } let half_thickness = underline.thickness * 0.5; + let st = (input.position.xy - underline.bounds.origin) / underline.bounds.size.y - vec2(0.0, 0.5); - let frequency = M_PI_F * 3.0 * underline.thickness / 3.0; - let amplitude = 1.0 / (4.0 * underline.thickness); + let frequency = M_PI_F * WAVE_FREQUENCY * underline.thickness / underline.bounds.size.y; + let amplitude = (underline.thickness * WAVE_HEIGHT_RATIO) / underline.bounds.size.y; + let sine = sin(st.x * frequency) * amplitude; let dSine = cos(st.x * frequency) * amplitude * frequency; let distance = (st.y - sine) / sqrt(1.0 + dSine * dSine); diff --git a/crates/gpui/src/platform/linux/platform.rs b/crates/gpui/src/platform/linux/platform.rs index fe6a36baa854856eb961a020ab35a7bd0195d465..3fb1ef45729e1e79f339aa19ad11554e7fce3772 100644 --- a/crates/gpui/src/platform/linux/platform.rs +++ b/crates/gpui/src/platform/linux/platform.rs @@ -108,13 +108,13 @@ impl LinuxCommon { let callbacks = PlatformHandlers::default(); - let dispatcher = Arc::new(LinuxDispatcher::new(main_sender.clone())); + let dispatcher = Arc::new(LinuxDispatcher::new(main_sender)); let background_executor = BackgroundExecutor::new(dispatcher.clone()); let common = LinuxCommon { background_executor, - foreground_executor: ForegroundExecutor::new(dispatcher.clone()), + foreground_executor: ForegroundExecutor::new(dispatcher), text_system, appearance: WindowAppearance::Light, auto_hide_scrollbars: false, @@ -294,6 +294,7 @@ impl Platform for P { let request = match ashpd::desktop::file_chooser::OpenFileRequest::default() .modal(true) .title(title) + .accept_label(options.prompt.as_ref().map(crate::SharedString::as_str)) .multiple(options.multiple) .directory(options.directories) .send() @@ -327,26 +328,35 @@ impl Platform for P { done_rx } - fn prompt_for_new_path(&self, directory: &Path) -> oneshot::Receiver>> { + fn prompt_for_new_path( + &self, + directory: &Path, + suggested_name: Option<&str>, + ) -> oneshot::Receiver>> { let (done_tx, done_rx) = oneshot::channel(); #[cfg(not(any(feature = "wayland", feature = "x11")))] - let _ = (done_tx.send(Ok(None)), directory); + let _ = (done_tx.send(Ok(None)), directory, suggested_name); #[cfg(any(feature = "wayland", feature = "x11"))] self.foreground_executor() .spawn({ let directory = directory.to_owned(); + let suggested_name = suggested_name.map(|s| s.to_owned()); async move { - let request = match ashpd::desktop::file_chooser::SaveFileRequest::default() - .modal(true) - .title("Save File") - .current_folder(directory) - .expect("pathbuf should not be nul terminated") - .send() - .await - { + let mut request_builder = + ashpd::desktop::file_chooser::SaveFileRequest::default() + .modal(true) + .title("Save File") + .current_folder(directory) + .expect("pathbuf should not be nul terminated"); + + if let Some(suggested_name) = suggested_name { + request_builder = request_builder.current_name(suggested_name.as_str()); + } + + let request = match request_builder.send().await { Ok(request) => request, Err(err) => { let result = match err { @@ -431,7 +441,7 @@ impl Platform for P { fn app_path(&self) -> Result { // get the path of the executable of the current process let app_path = env::current_exe()?; - return Ok(app_path); + Ok(app_path) } fn set_menus(&self, menus: Vec
, _keymap: &Keymap) { @@ -632,7 +642,7 @@ pub(super) fn get_xkb_compose_state(cx: &xkb::Context) -> Option = None; for locale in locales { if let Ok(table) = - xkb::compose::Table::new_from_locale(&cx, &locale, xkb::compose::COMPILE_NO_FLAGS) + xkb::compose::Table::new_from_locale(cx, &locale, xkb::compose::COMPILE_NO_FLAGS) { state = Some(xkb::compose::State::new( &table, @@ -657,7 +667,7 @@ pub(super) const DEFAULT_CURSOR_ICON_NAME: &str = "left_ptr"; impl CursorStyle { #[cfg(any(feature = "wayland", feature = "x11"))] - pub(super) fn to_icon_names(&self) -> &'static [&'static str] { + pub(super) fn to_icon_names(self) -> &'static [&'static str] { // Based on cursor names from chromium: // https://github.com/chromium/chromium/blob/d3069cf9c973dc3627fa75f64085c6a86c8f41bf/ui/base/cursor/cursor_factory.cc#L113 match self { @@ -980,21 +990,18 @@ mod tests { #[test] fn test_is_within_click_distance() { let zero = Point::new(px(0.0), px(0.0)); - assert_eq!( - is_within_click_distance(zero, Point::new(px(5.0), px(5.0))), - true - ); - assert_eq!( - is_within_click_distance(zero, Point::new(px(-4.9), px(5.0))), - true - ); - assert_eq!( - is_within_click_distance(Point::new(px(3.0), px(2.0)), Point::new(px(-2.0), px(-2.0))), - true - ); - assert_eq!( - is_within_click_distance(zero, Point::new(px(5.0), px(5.1))), - false - ); + assert!(is_within_click_distance(zero, Point::new(px(5.0), px(5.0)))); + assert!(is_within_click_distance( + zero, + Point::new(px(-4.9), px(5.0)) + )); + assert!(is_within_click_distance( + Point::new(px(3.0), px(2.0)), + Point::new(px(-2.0), px(-2.0)) + )); + assert!(!is_within_click_distance( + zero, + Point::new(px(5.0), px(5.1)) + ),); } } diff --git a/crates/gpui/src/platform/linux/text_system.rs b/crates/gpui/src/platform/linux/text_system.rs index e6f6e9a6809d8bb7b2063abbb23d3b9b1567497d..f66a2e71d49f39c0e82770e23aa8eca752970daf 100644 --- a/crates/gpui/src/platform/linux/text_system.rs +++ b/crates/gpui/src/platform/linux/text_system.rs @@ -213,11 +213,7 @@ impl CosmicTextSystemState { features: &FontFeatures, ) -> Result> { // TODO: Determine the proper system UI font. 
- let name = if name == ".SystemUIFont" { - "Zed Plex Sans" - } else { - name - }; + let name = crate::text_system::font_name_with_fallbacks(name, "IBM Plex Sans"); let families = self .font_system diff --git a/crates/gpui/src/platform/linux/wayland.rs b/crates/gpui/src/platform/linux/wayland.rs index cf73832b11fb1baad08bf5ee3142e461876fbe92..487bc9f38c927609100a238ac4726c2aab3b87b0 100644 --- a/crates/gpui/src/platform/linux/wayland.rs +++ b/crates/gpui/src/platform/linux/wayland.rs @@ -12,7 +12,7 @@ use wayland_protocols::wp::cursor_shape::v1::client::wp_cursor_shape_device_v1:: use crate::CursorStyle; impl CursorStyle { - pub(super) fn to_shape(&self) -> Shape { + pub(super) fn to_shape(self) -> Shape { match self { CursorStyle::Arrow => Shape::Default, CursorStyle::IBeam => Shape::Text, diff --git a/crates/gpui/src/platform/linux/wayland/client.rs b/crates/gpui/src/platform/linux/wayland/client.rs index 72e4477ecf697a9f6443dffb80e0637202d3b848..189cfa19545f052cf8ebc75b89c1f955d3396859 100644 --- a/crates/gpui/src/platform/linux/wayland/client.rs +++ b/crates/gpui/src/platform/linux/wayland/client.rs @@ -359,13 +359,13 @@ impl WaylandClientStatePtr { } changed }; - if changed { - if let Some(mut callback) = state.common.callbacks.keyboard_layout_change.take() { - drop(state); - callback(); - state = client.borrow_mut(); - state.common.callbacks.keyboard_layout_change = Some(callback); - } + + if changed && let Some(mut callback) = state.common.callbacks.keyboard_layout_change.take() + { + drop(state); + callback(); + state = client.borrow_mut(); + state.common.callbacks.keyboard_layout_change = Some(callback); } } @@ -373,15 +373,15 @@ impl WaylandClientStatePtr { let mut client = self.get_client(); let mut state = client.borrow_mut(); let closed_window = state.windows.remove(surface_id).unwrap(); - if let Some(window) = state.mouse_focused_window.take() { - if !window.ptr_eq(&closed_window) { - state.mouse_focused_window = Some(window); - } + if let Some(window) = state.mouse_focused_window.take() + && !window.ptr_eq(&closed_window) + { + state.mouse_focused_window = Some(window); } - if let Some(window) = state.keyboard_focused_window.take() { - if !window.ptr_eq(&closed_window) { - state.keyboard_focused_window = Some(window); - } + if let Some(window) = state.keyboard_focused_window.take() + && !window.ptr_eq(&closed_window) + { + state.keyboard_focused_window = Some(window); } if state.windows.is_empty() { state.common.signal.stop(); @@ -528,7 +528,7 @@ impl WaylandClient { client.common.appearance = appearance; - for (_, window) in &mut client.windows { + for window in client.windows.values_mut() { window.set_appearance(appearance); } } @@ -710,9 +710,7 @@ impl LinuxClient for WaylandClient { fn set_cursor_style(&self, style: CursorStyle) { let mut state = self.0.borrow_mut(); - let need_update = state - .cursor_style - .map_or(true, |current_style| current_style != style); + let need_update = state.cursor_style != Some(style); if need_update { let serial = state.serial_tracker.get(SerialKind::MouseEnter); @@ -951,11 +949,8 @@ impl Dispatch for WaylandClientStatePtr { }; drop(state); - match event { - wl_callback::Event::Done { .. } => { - window.frame(); - } - _ => {} + if let wl_callback::Event::Done { .. 
} = event { + window.frame(); } } } @@ -1145,7 +1140,7 @@ impl Dispatch for WaylandClientStatePtr { .globals .text_input_manager .as_ref() - .map(|text_input_manager| text_input_manager.get_text_input(&seat, qh, ())); + .map(|text_input_manager| text_input_manager.get_text_input(seat, qh, ())); if let Some(wl_keyboard) = &state.wl_keyboard { wl_keyboard.release(); @@ -1285,7 +1280,6 @@ impl Dispatch for WaylandClientStatePtr { let Some(focused_window) = focused_window else { return; }; - let focused_window = focused_window.clone(); let keymap_state = state.keymap_state.as_ref().unwrap(); let keycode = Keycode::from(key + MIN_KEYCODE); @@ -1294,7 +1288,7 @@ impl Dispatch for WaylandClientStatePtr { match key_state { wl_keyboard::KeyState::Pressed if !keysym.is_modifier_key() => { let mut keystroke = - Keystroke::from_xkb(&keymap_state, state.modifiers, keycode); + Keystroke::from_xkb(keymap_state, state.modifiers, keycode); if let Some(mut compose) = state.compose_state.take() { compose.feed(keysym); match compose.status() { @@ -1538,12 +1532,9 @@ impl Dispatch for WaylandClientStatePtr { cursor_shape_device.set_shape(serial, style.to_shape()); } else { let scale = window.primary_output_scale(); - state.cursor.set_icon( - &wl_pointer, - serial, - style.to_icon_names(), - scale, - ); + state + .cursor + .set_icon(wl_pointer, serial, style.to_icon_names(), scale); } } drop(state); @@ -1580,7 +1571,7 @@ impl Dispatch for WaylandClientStatePtr { if state .keyboard_focused_window .as_ref() - .map_or(false, |keyboard_window| window.ptr_eq(&keyboard_window)) + .is_some_and(|keyboard_window| window.ptr_eq(keyboard_window)) { state.enter_token = None; } @@ -1787,17 +1778,17 @@ impl Dispatch for WaylandClientStatePtr { drop(state); window.handle_input(input); } - } else if let Some(discrete) = discrete { - if let Some(window) = state.mouse_focused_window.clone() { - let input = PlatformInput::ScrollWheel(ScrollWheelEvent { - position: state.mouse_location.unwrap(), - delta: ScrollDelta::Lines(discrete), - modifiers: state.modifiers, - touch_phase: TouchPhase::Moved, - }); - drop(state); - window.handle_input(input); - } + } else if let Some(discrete) = discrete + && let Some(window) = state.mouse_focused_window.clone() + { + let input = PlatformInput::ScrollWheel(ScrollWheelEvent { + position: state.mouse_location.unwrap(), + delta: ScrollDelta::Lines(discrete), + modifiers: state.modifiers, + touch_phase: TouchPhase::Moved, + }); + drop(state); + window.handle_input(input); } } } @@ -2019,25 +2010,22 @@ impl Dispatch for WaylandClientStatePtr { let client = this.get_client(); let mut state = client.borrow_mut(); - match event { - wl_data_offer::Event::Offer { mime_type } => { - // Drag and drop - if mime_type == FILE_LIST_MIME_TYPE { - let serial = state.serial_tracker.get(SerialKind::DataDevice); - let mime_type = mime_type.clone(); - data_offer.accept(serial, Some(mime_type)); - } + if let wl_data_offer::Event::Offer { mime_type } = event { + // Drag and drop + if mime_type == FILE_LIST_MIME_TYPE { + let serial = state.serial_tracker.get(SerialKind::DataDevice); + let mime_type = mime_type.clone(); + data_offer.accept(serial, Some(mime_type)); + } - // Clipboard - if let Some(offer) = state - .data_offers - .iter_mut() - .find(|wrapper| wrapper.inner.id() == data_offer.id()) - { - offer.add_mime_type(mime_type); - } + // Clipboard + if let Some(offer) = state + .data_offers + .iter_mut() + .find(|wrapper| wrapper.inner.id() == data_offer.id()) + { + offer.add_mime_type(mime_type); } - _ => {} } } 
} @@ -2118,13 +2106,10 @@ impl Dispatch let client = this.get_client(); let mut state = client.borrow_mut(); - match event { - zwp_primary_selection_offer_v1::Event::Offer { mime_type } => { - if let Some(offer) = state.primary_data_offer.as_mut() { - offer.add_mime_type(mime_type); - } - } - _ => {} + if let zwp_primary_selection_offer_v1::Event::Offer { mime_type } = event + && let Some(offer) = state.primary_data_offer.as_mut() + { + offer.add_mime_type(mime_type); } } } diff --git a/crates/gpui/src/platform/linux/wayland/cursor.rs b/crates/gpui/src/platform/linux/wayland/cursor.rs index 2a24d0e1ba347fb718da126120bc809c65d93b33..c7c9139dea795701e459387a309b1817e2f60971 100644 --- a/crates/gpui/src/platform/linux/wayland/cursor.rs +++ b/crates/gpui/src/platform/linux/wayland/cursor.rs @@ -45,10 +45,11 @@ impl Cursor { } fn set_theme_internal(&mut self, theme_name: Option) { - if let Some(loaded_theme) = self.loaded_theme.as_ref() { - if loaded_theme.name == theme_name && loaded_theme.scaled_size == self.scaled_size { - return; - } + if let Some(loaded_theme) = self.loaded_theme.as_ref() + && loaded_theme.name == theme_name + && loaded_theme.scaled_size == self.scaled_size + { + return; } let result = if let Some(theme_name) = theme_name.as_ref() { CursorTheme::load_from_name( @@ -66,7 +67,7 @@ impl Cursor { { self.loaded_theme = Some(LoadedTheme { theme, - name: theme_name.map(|name| name.to_string()), + name: theme_name, scaled_size: self.scaled_size, }); } @@ -144,7 +145,7 @@ impl Cursor { hot_y as i32 / scale, ); - self.surface.attach(Some(&buffer), 0, 0); + self.surface.attach(Some(buffer), 0, 0); self.surface.damage(0, 0, width as i32, height as i32); self.surface.commit(); } diff --git a/crates/gpui/src/platform/linux/wayland/window.rs b/crates/gpui/src/platform/linux/wayland/window.rs index 2b2207e22c86fc25e6387581bb92b9c304f4bc9d..7570c58c09e8d5c63091174fa51bc30c54c005e1 100644 --- a/crates/gpui/src/platform/linux/wayland/window.rs +++ b/crates/gpui/src/platform/linux/wayland/window.rs @@ -355,85 +355,82 @@ impl WaylandWindowStatePtr { } pub fn handle_xdg_surface_event(&self, event: xdg_surface::Event) { - match event { - xdg_surface::Event::Configure { serial } => { - { - let mut state = self.state.borrow_mut(); - if let Some(window_controls) = state.in_progress_window_controls.take() { - state.window_controls = window_controls; - - drop(state); - let mut callbacks = self.callbacks.borrow_mut(); - if let Some(appearance_changed) = callbacks.appearance_changed.as_mut() { - appearance_changed(); - } + if let xdg_surface::Event::Configure { serial } = event { + { + let mut state = self.state.borrow_mut(); + if let Some(window_controls) = state.in_progress_window_controls.take() { + state.window_controls = window_controls; + + drop(state); + let mut callbacks = self.callbacks.borrow_mut(); + if let Some(appearance_changed) = callbacks.appearance_changed.as_mut() { + appearance_changed(); } } - { - let mut state = self.state.borrow_mut(); - - if let Some(mut configure) = state.in_progress_configure.take() { - let got_unmaximized = state.maximized && !configure.maximized; - state.fullscreen = configure.fullscreen; - state.maximized = configure.maximized; - state.tiling = configure.tiling; - // Limit interactive resizes to once per vblank - if configure.resizing && state.resize_throttle { - return; - } else if configure.resizing { - state.resize_throttle = true; - } - if !configure.fullscreen && !configure.maximized { - configure.size = if got_unmaximized { - 
Some(state.window_bounds.size) - } else { - compute_outer_size(state.inset(), configure.size, state.tiling) - }; - if let Some(size) = configure.size { - state.window_bounds = Bounds { - origin: Point::default(), - size, - }; - } - } - drop(state); + } + { + let mut state = self.state.borrow_mut(); + + if let Some(mut configure) = state.in_progress_configure.take() { + let got_unmaximized = state.maximized && !configure.maximized; + state.fullscreen = configure.fullscreen; + state.maximized = configure.maximized; + state.tiling = configure.tiling; + // Limit interactive resizes to once per vblank + if configure.resizing && state.resize_throttle { + return; + } else if configure.resizing { + state.resize_throttle = true; + } + if !configure.fullscreen && !configure.maximized { + configure.size = if got_unmaximized { + Some(state.window_bounds.size) + } else { + compute_outer_size(state.inset(), configure.size, state.tiling) + }; if let Some(size) = configure.size { - self.resize(size); + state.window_bounds = Bounds { + origin: Point::default(), + size, + }; } } - } - let mut state = self.state.borrow_mut(); - state.xdg_surface.ack_configure(serial); - - let window_geometry = inset_by_tiling( - state.bounds.map_origin(|_| px(0.0)), - state.inset(), - state.tiling, - ) - .map(|v| v.0 as i32) - .map_size(|v| if v <= 0 { 1 } else { v }); - - state.xdg_surface.set_window_geometry( - window_geometry.origin.x, - window_geometry.origin.y, - window_geometry.size.width, - window_geometry.size.height, - ); - - let request_frame_callback = !state.acknowledged_first_configure; - if request_frame_callback { - state.acknowledged_first_configure = true; drop(state); - self.frame(); + if let Some(size) = configure.size { + self.resize(size); + } } } - _ => {} + let mut state = self.state.borrow_mut(); + state.xdg_surface.ack_configure(serial); + + let window_geometry = inset_by_tiling( + state.bounds.map_origin(|_| px(0.0)), + state.inset(), + state.tiling, + ) + .map(|v| v.0 as i32) + .map_size(|v| if v <= 0 { 1 } else { v }); + + state.xdg_surface.set_window_geometry( + window_geometry.origin.x, + window_geometry.origin.y, + window_geometry.size.width, + window_geometry.size.height, + ); + + let request_frame_callback = !state.acknowledged_first_configure; + if request_frame_callback { + state.acknowledged_first_configure = true; + drop(state); + self.frame(); + } } } pub fn handle_toplevel_decoration_event(&self, event: zxdg_toplevel_decoration_v1::Event) { - match event { - zxdg_toplevel_decoration_v1::Event::Configure { mode } => match mode { + if let zxdg_toplevel_decoration_v1::Event::Configure { mode } = event { + match mode { WEnum::Value(zxdg_toplevel_decoration_v1::Mode::ServerSide) => { self.state.borrow_mut().decorations = WindowDecorations::Server; if let Some(mut appearance_changed) = @@ -457,17 +454,13 @@ impl WaylandWindowStatePtr { WEnum::Unknown(v) => { log::warn!("Unknown decoration mode: {}", v); } - }, - _ => {} + } } } pub fn handle_fractional_scale_event(&self, event: wp_fractional_scale_v1::Event) { - match event { - wp_fractional_scale_v1::Event::PreferredScale { scale } => { - self.rescale(scale as f32 / 120.0); - } - _ => {} + if let wp_fractional_scale_v1::Event::PreferredScale { scale } = event { + self.rescale(scale as f32 / 120.0); } } @@ -669,8 +662,8 @@ impl WaylandWindowStatePtr { pub fn set_size_and_scale(&self, size: Option>, scale: Option) { let (size, scale) = { let mut state = self.state.borrow_mut(); - if size.map_or(true, |size| size == state.bounds.size) - && 
scale.map_or(true, |scale| scale == state.scale) + if size.is_none_or(|size| size == state.bounds.size) + && scale.is_none_or(|scale| scale == state.scale) { return; } @@ -713,21 +706,20 @@ impl WaylandWindowStatePtr { } pub fn handle_input(&self, input: PlatformInput) { - if let Some(ref mut fun) = self.callbacks.borrow_mut().input { - if !fun(input.clone()).propagate { - return; - } + if let Some(ref mut fun) = self.callbacks.borrow_mut().input + && !fun(input.clone()).propagate + { + return; } - if let PlatformInput::KeyDown(event) = input { - if event.keystroke.modifiers.is_subset_of(&Modifiers::shift()) { - if let Some(key_char) = &event.keystroke.key_char { - let mut state = self.state.borrow_mut(); - if let Some(mut input_handler) = state.input_handler.take() { - drop(state); - input_handler.replace_text_in_range(None, key_char); - self.state.borrow_mut().input_handler = Some(input_handler); - } - } + if let PlatformInput::KeyDown(event) = input + && event.keystroke.modifiers.is_subset_of(&Modifiers::shift()) + && let Some(key_char) = &event.keystroke.key_char + { + let mut state = self.state.borrow_mut(); + if let Some(mut input_handler) = state.input_handler.take() { + drop(state); + input_handler.replace_text_in_range(None, key_char); + self.state.borrow_mut().input_handler = Some(input_handler); } } } @@ -1147,7 +1139,7 @@ fn update_window(mut state: RefMut) { } impl WindowDecorations { - fn to_xdg(&self) -> zxdg_toplevel_decoration_v1::Mode { + fn to_xdg(self) -> zxdg_toplevel_decoration_v1::Mode { match self { WindowDecorations::Client => zxdg_toplevel_decoration_v1::Mode::ClientSide, WindowDecorations::Server => zxdg_toplevel_decoration_v1::Mode::ServerSide, @@ -1156,7 +1148,7 @@ impl WindowDecorations { } impl ResizeEdge { - fn to_xdg(&self) -> xdg_toplevel::ResizeEdge { + fn to_xdg(self) -> xdg_toplevel::ResizeEdge { match self { ResizeEdge::Top => xdg_toplevel::ResizeEdge::Top, ResizeEdge::TopRight => xdg_toplevel::ResizeEdge::TopRight, diff --git a/crates/gpui/src/platform/linux/x11/client.rs b/crates/gpui/src/platform/linux/x11/client.rs index 573e4addf75b90d50e7f453555507462280fb3d4..9a43bd64706ec21905b18b8837af2ddc785cba87 100644 --- a/crates/gpui/src/platform/linux/x11/client.rs +++ b/crates/gpui/src/platform/linux/x11/client.rs @@ -232,15 +232,12 @@ impl X11ClientStatePtr { }; let mut state = client.0.borrow_mut(); - if let Some(window_ref) = state.windows.remove(&x_window) { - match window_ref.refresh_state { - Some(RefreshState::PeriodicRefresh { - event_loop_token, .. - }) => { - state.loop_handle.remove(event_loop_token); - } - _ => {} - } + if let Some(window_ref) = state.windows.remove(&x_window) + && let Some(RefreshState::PeriodicRefresh { + event_loop_token, .. 
+ }) = window_ref.refresh_state + { + state.loop_handle.remove(event_loop_token); } if state.mouse_focused_window == Some(x_window) { state.mouse_focused_window = None; @@ -459,7 +456,7 @@ impl X11Client { move |event, _, client| match event { XDPEvent::WindowAppearance(appearance) => { client.with_common(|common| common.appearance = appearance); - for (_, window) in &mut client.0.borrow_mut().windows { + for window in client.0.borrow_mut().windows.values_mut() { window.window.set_appearance(appearance); } } @@ -565,10 +562,10 @@ impl X11Client { events.push(last_keymap_change_event); } - if let Some(last_press) = last_key_press.as_ref() { - if last_press.detail == key_press.detail { - continue; - } + if let Some(last_press) = last_key_press.as_ref() + && last_press.detail == key_press.detail + { + continue; } if let Some(Event::KeyRelease(key_release)) = @@ -642,13 +639,7 @@ impl X11Client { let xim_connected = xim_handler.connected; drop(state); - let xim_filtered = match ximc.filter_event(&event, &mut xim_handler) { - Ok(handled) => handled, - Err(err) => { - log::error!("XIMClientError: {}", err); - false - } - }; + let xim_filtered = ximc.filter_event(&event, &mut xim_handler); let xim_callback_event = xim_handler.last_callback_event.take(); let mut state = self.0.borrow_mut(); @@ -659,14 +650,28 @@ impl X11Client { self.handle_xim_callback_event(event); } - if xim_filtered { - continue; - } - - if xim_connected { - self.xim_handle_event(event); - } else { - self.handle_event(event); + match xim_filtered { + Ok(handled) => { + if handled { + continue; + } + if xim_connected { + self.xim_handle_event(event); + } else { + self.handle_event(event); + } + } + Err(err) => { + // this might happen when xim server crashes on one of the events + // we do lose 1-2 keys when crash happens since there is no reliable way to get that info + // luckily, x11 sends us window not found error when xim server crashes upon further key press + // hence we fall back to handle_event + log::error!("XIMClientError: {}", err); + let mut state = self.0.borrow_mut(); + state.take_xim(); + drop(state); + self.handle_event(event); + } } } } @@ -868,22 +873,19 @@ impl X11Client { let Some(reply) = reply else { return Some(()); }; - match str::from_utf8(&reply.value) { - Ok(file_list) => { - let paths: SmallVec<[_; 2]> = file_list - .lines() - .filter_map(|path| Url::parse(path).log_err()) - .filter_map(|url| url.to_file_path().log_err()) - .collect(); - let input = PlatformInput::FileDrop(FileDropEvent::Entered { - position: state.xdnd_state.position, - paths: crate::ExternalPaths(paths), - }); - drop(state); - window.handle_input(input); - self.0.borrow_mut().xdnd_state.retrieved = true; - } - Err(_) => {} + if let Ok(file_list) = str::from_utf8(&reply.value) { + let paths: SmallVec<[_; 2]> = file_list + .lines() + .filter_map(|path| Url::parse(path).log_err()) + .filter_map(|url| url.to_file_path().log_err()) + .collect(); + let input = PlatformInput::FileDrop(FileDropEvent::Entered { + position: state.xdnd_state.position, + paths: crate::ExternalPaths(paths), + }); + drop(state); + window.handle_input(input); + self.0.borrow_mut().xdnd_state.retrieved = true; } } Event::ConfigureNotify(event) => { @@ -1204,7 +1206,7 @@ impl X11Client { state = self.0.borrow_mut(); if let Some(mut pointer) = state.pointer_device_states.get_mut(&event.sourceid) { - let scroll_delta = get_scroll_delta_and_update_state(&mut pointer, &event); + let scroll_delta = get_scroll_delta_and_update_state(pointer, &event); drop(state); if let 
Some(scroll_delta) = scroll_delta { window.handle_input(PlatformInput::ScrollWheel(make_scroll_wheel_event( @@ -1263,7 +1265,7 @@ impl X11Client { Event::XinputDeviceChanged(event) => { let mut state = self.0.borrow_mut(); if let Some(mut pointer) = state.pointer_device_states.get_mut(&event.sourceid) { - reset_pointer_device_scroll_positions(&mut pointer); + reset_pointer_device_scroll_positions(pointer); } } _ => {} @@ -1327,7 +1329,7 @@ impl X11Client { state.composing = false; drop(state); if let Some(mut keystroke) = keystroke { - keystroke.key_char = Some(text.clone()); + keystroke.key_char = Some(text); window.handle_input(PlatformInput::KeyDown(crate::KeyDownEvent { keystroke, is_held: false, @@ -1578,11 +1580,11 @@ impl LinuxClient for X11Client { fn read_from_primary(&self) -> Option { let state = self.0.borrow_mut(); - return state + state .clipboard .get_any(clipboard::ClipboardKind::Primary) .context("X11: Failed to read from clipboard (primary)") - .log_with_level(log::Level::Debug); + .log_with_level(log::Level::Debug) } fn read_from_clipboard(&self) -> Option { @@ -1595,11 +1597,11 @@ impl LinuxClient for X11Client { { return state.clipboard_item.clone(); } - return state + state .clipboard .get_any(clipboard::ClipboardKind::Clipboard) .context("X11: Failed to read from clipboard (clipboard)") - .log_with_level(log::Level::Debug); + .log_with_level(log::Level::Debug) } fn run(&self) { @@ -2002,12 +2004,12 @@ fn check_gtk_frame_extents_supported( } fn xdnd_is_atom_supported(atom: u32, atoms: &XcbAtoms) -> bool { - return atom == atoms.TEXT + atom == atoms.TEXT || atom == atoms.STRING || atom == atoms.UTF8_STRING || atom == atoms.TEXT_PLAIN || atom == atoms.TEXT_PLAIN_UTF8 - || atom == atoms.TextUriList; + || atom == atoms.TextUriList } fn xdnd_get_supported_atom( @@ -2027,16 +2029,15 @@ fn xdnd_get_supported_atom( ), ) .log_with_level(Level::Warn) + && let Some(atoms) = reply.value32() { - if let Some(atoms) = reply.value32() { - for atom in atoms { - if xdnd_is_atom_supported(atom, &supported_atoms) { - return atom; - } + for atom in atoms { + if xdnd_is_atom_supported(atom, supported_atoms) { + return atom; } } } - return 0; + 0 } fn xdnd_send_finished( @@ -2107,7 +2108,7 @@ fn current_pointer_device_states( .classes .iter() .filter_map(|class| class.data.as_scroll()) - .map(|class| *class) + .copied() .rev() .collect::>(); let old_state = scroll_values_to_preserve.get(&info.deviceid); @@ -2137,7 +2138,7 @@ fn current_pointer_device_states( if pointer_device_states.is_empty() { log::error!("Found no xinput mouse pointers."); } - return Some(pointer_device_states); + Some(pointer_device_states) } /// Returns true if the device is a pointer device. Does not include pointer device groups. 
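Most of the Wayland and X11 hunks above are mechanical clean-ups: `map_or(false, ..)` becomes `is_some_and`, `map_or(true, ..)` becomes `is_none_or`, needless borrows are dropped, and nested `if let`/`if` blocks collapse into let-chains. A minimal sketch of the equivalences these rewrites rely on, using toy values rather than the gpui types (the let-chain assumes a Rust 2024 edition toolchain with stabilized let-chains):

```rust
// Illustrative only: the Option-combinator and let-chain equivalences these
// hunks rely on, shown on toy values rather than the gpui types.
fn main() {
    let focused: Option<u32> = Some(7);

    // `map_or(false, f)` and `is_some_and(f)` agree for every Option.
    assert_eq!(
        focused.map_or(false, |id| id == 7),
        focused.is_some_and(|id| id == 7)
    );

    // `map_or(true, f)` and `is_none_or(f)` agree as well.
    let size: Option<u32> = None;
    assert_eq!(size.map_or(true, |s| s == 0), size.is_none_or(|s| s == 0));

    // Nested `if let` + `if` collapses into a single let-chain.
    if let Some(id) = focused
        && id == 7
    {
        println!("focused window {id}");
    }
}
```

Because the combinators are behavior-preserving, these hunks should be pure refactors with no observable change.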
@@ -2403,11 +2404,13 @@ fn legacy_get_randr_scale_factor(connection: &XCBConnection, root: u32) -> Optio let mut crtc_infos: HashMap = HashMap::default(); let mut valid_outputs: HashSet = HashSet::new(); for (crtc, cookie) in crtc_cookies { - if let Ok(reply) = cookie.reply() { - if reply.width > 0 && reply.height > 0 && !reply.outputs.is_empty() { - crtc_infos.insert(crtc, reply.clone()); - valid_outputs.extend(&reply.outputs); - } + if let Ok(reply) = cookie.reply() + && reply.width > 0 + && reply.height > 0 + && !reply.outputs.is_empty() + { + crtc_infos.insert(crtc, reply.clone()); + valid_outputs.extend(&reply.outputs); } } diff --git a/crates/gpui/src/platform/linux/x11/clipboard.rs b/crates/gpui/src/platform/linux/x11/clipboard.rs index 5d42eadaaf04e0ad7811b980e6d31b4bca935139..a6f96d38c4254da5a2f92261700126962c16e91c 100644 --- a/crates/gpui/src/platform/linux/x11/clipboard.rs +++ b/crates/gpui/src/platform/linux/x11/clipboard.rs @@ -1078,11 +1078,11 @@ impl Clipboard { } else { String::from_utf8(result.bytes).map_err(|_| Error::ConversionFailure)? }; - return Ok(ClipboardItem::new_string(text)); + Ok(ClipboardItem::new_string(text)) } pub fn is_owner(&self, selection: ClipboardKind) -> bool { - return self.inner.is_owner(selection).unwrap_or(false); + self.inner.is_owner(selection).unwrap_or(false) } } @@ -1120,25 +1120,25 @@ impl Drop for Clipboard { log::error!("Failed to flush the clipboard window. Error: {}", e); return; } - if let Some(global_cb) = global_cb { - if let Err(e) = global_cb.server_handle.join() { - // Let's try extracting the error message - let message; - if let Some(msg) = e.downcast_ref::<&'static str>() { - message = Some((*msg).to_string()); - } else if let Some(msg) = e.downcast_ref::() { - message = Some(msg.clone()); - } else { - message = None; - } - if let Some(message) = message { - log::error!( - "The clipboard server thread panicked. Panic message: '{}'", - message, - ); - } else { - log::error!("The clipboard server thread panicked."); - } + if let Some(global_cb) = global_cb + && let Err(e) = global_cb.server_handle.join() + { + // Let's try extracting the error message + let message; + if let Some(msg) = e.downcast_ref::<&'static str>() { + message = Some((*msg).to_string()); + } else if let Some(msg) = e.downcast_ref::() { + message = Some(msg.clone()); + } else { + message = None; + } + if let Some(message) = message { + log::error!( + "The clipboard server thread panicked. Panic message: '{}'", + message, + ); + } else { + log::error!("The clipboard server thread panicked."); } } } diff --git a/crates/gpui/src/platform/linux/x11/event.rs b/crates/gpui/src/platform/linux/x11/event.rs index cd4cef24a33f33aaa2f2e685089eb1a2368719e2..17bcc908d3a6bdd48f16a8f5db69f08290b9444f 100644 --- a/crates/gpui/src/platform/linux/x11/event.rs +++ b/crates/gpui/src/platform/linux/x11/event.rs @@ -73,8 +73,8 @@ pub(crate) fn get_valuator_axis_index( // valuator present in this event's axisvalues. Axisvalues is ordered from // lowest valuator number to highest, so counting bits before the 1 bit for // this valuator yields the index in axisvalues. 
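The comment above describes how an XInput2 valuator's value is located in `axisvalues`: if the valuator's bit is set in the mask, its index equals the number of set mask bits below it. A small illustrative sketch of that rule, assuming a `Vec<u32>` mask as carried by the event; the helper name is hypothetical, not the gpui function itself:

```rust
// Sketch of the indexing rule described above: the position of a valuator's
// value in `axisvalues` is the number of set mask bits below its own bit.
fn axis_index(mask: &[u32], valuator: u16) -> Option<usize> {
    let word = valuator as usize / 32;
    let bit = valuator as usize % 32;
    let word_bits = *mask.get(word)?;
    if (word_bits & (1 << bit)) == 0 {
        return None; // this event does not carry that valuator
    }
    // Count set bits in all earlier words, plus the lower bits of this word.
    let below_in_word = (word_bits & ((1u32 << bit) - 1)).count_ones() as usize;
    let in_earlier_words: usize = mask[..word].iter().map(|w| w.count_ones() as usize).sum();
    Some(in_earlier_words + below_in_word)
}

fn main() {
    // Mask with valuators 1 and 3 present: their values occupy indices 0 and 1.
    let mask = vec![0b1010u32];
    assert_eq!(axis_index(&mask, 3), Some(1));
    assert_eq!(axis_index(&mask, 2), None);
}
```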
- if bit_is_set_in_vec(&valuator_mask, valuator_number) { - Some(popcount_upto_bit_index(&valuator_mask, valuator_number) as usize) + if bit_is_set_in_vec(valuator_mask, valuator_number) { + Some(popcount_upto_bit_index(valuator_mask, valuator_number) as usize) } else { None } @@ -104,7 +104,7 @@ fn bit_is_set_in_vec(bit_vec: &Vec, bit_index: u16) -> bool { let array_index = bit_index as usize / 32; bit_vec .get(array_index) - .map_or(false, |bits| bit_is_set(*bits, bit_index % 32)) + .is_some_and(|bits| bit_is_set(*bits, bit_index % 32)) } fn bit_is_set(bits: u32, bit_index: u16) -> bool { diff --git a/crates/gpui/src/platform/linux/x11/window.rs b/crates/gpui/src/platform/linux/x11/window.rs index 1a3c323c35129b9ea56595b7f81775de4b036454..6af943b31761dc26b2cde4090cad4ce6574dd5c9 100644 --- a/crates/gpui/src/platform/linux/x11/window.rs +++ b/crates/gpui/src/platform/linux/x11/window.rs @@ -95,7 +95,7 @@ fn query_render_extent( } impl ResizeEdge { - fn to_moveresize(&self) -> u32 { + fn to_moveresize(self) -> u32 { match self { ResizeEdge::TopLeft => 0, ResizeEdge::Top => 1, @@ -397,7 +397,7 @@ impl X11WindowState { .display_id .map_or(x_main_screen_index, |did| did.0 as usize); - let visual_set = find_visuals(&xcb, x_screen_index); + let visual_set = find_visuals(xcb, x_screen_index); let visual = match visual_set.transparent { Some(visual) => visual, @@ -515,19 +515,19 @@ impl X11WindowState { xcb.configure_window(x_window, &xproto::ConfigureWindowAux::new().x(x).y(y)), )?; } - if let Some(titlebar) = params.titlebar { - if let Some(title) = titlebar.title { - check_reply( - || "X11 ChangeProperty8 on window title failed.", - xcb.change_property8( - xproto::PropMode::REPLACE, - x_window, - xproto::AtomEnum::WM_NAME, - xproto::AtomEnum::STRING, - title.as_bytes(), - ), - )?; - } + if let Some(titlebar) = params.titlebar + && let Some(title) = titlebar.title + { + check_reply( + || "X11 ChangeProperty8 on window title failed.", + xcb.change_property8( + xproto::PropMode::REPLACE, + x_window, + xproto::AtomEnum::WM_NAME, + xproto::AtomEnum::STRING, + title.as_bytes(), + ), + )?; } if params.kind == WindowKind::PopUp { check_reply( @@ -604,7 +604,7 @@ impl X11WindowState { ), )?; - xcb_flush(&xcb); + xcb_flush(xcb); let renderer = { let raw_window = RawWindow { @@ -664,7 +664,7 @@ impl X11WindowState { || "X11 DestroyWindow failed while cleaning it up after setup failure.", xcb.destroy_window(x_window), )?; - xcb_flush(&xcb); + xcb_flush(xcb); } setup_result @@ -956,10 +956,10 @@ impl X11WindowStatePtr { } pub fn handle_input(&self, input: PlatformInput) { - if let Some(ref mut fun) = self.callbacks.borrow_mut().input { - if !fun(input.clone()).propagate { - return; - } + if let Some(ref mut fun) = self.callbacks.borrow_mut().input + && !fun(input.clone()).propagate + { + return; } if let PlatformInput::KeyDown(event) = input { // only allow shift modifier when inserting text @@ -1068,15 +1068,14 @@ impl X11WindowStatePtr { } let mut callbacks = self.callbacks.borrow_mut(); - if let Some((content_size, scale_factor)) = resize_args { - if let Some(ref mut fun) = callbacks.resize { - fun(content_size, scale_factor) - } + if let Some((content_size, scale_factor)) = resize_args + && let Some(ref mut fun) = callbacks.resize + { + fun(content_size, scale_factor) } - if !is_resize { - if let Some(ref mut fun) = callbacks.moved { - fun(); - } + + if !is_resize && let Some(ref mut fun) = callbacks.moved { + fun(); } Ok(()) diff --git a/crates/gpui/src/platform/mac/events.rs 
b/crates/gpui/src/platform/mac/events.rs index 0dc361b9dcfdb0980561037484cf51b84dc251e8..938db4b76205ee43eb979995c240b8d96e2aa57a 100644 --- a/crates/gpui/src/platform/mac/events.rs +++ b/crates/gpui/src/platform/mac/events.rs @@ -311,9 +311,8 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke { let mut shift = modifiers.contains(NSEventModifierFlags::NSShiftKeyMask); let command = modifiers.contains(NSEventModifierFlags::NSCommandKeyMask); let function = modifiers.contains(NSEventModifierFlags::NSFunctionKeyMask) - && first_char.map_or(true, |ch| { - !(NSUpArrowFunctionKey..=NSModeSwitchFunctionKey).contains(&ch) - }); + && first_char + .is_none_or(|ch| !(NSUpArrowFunctionKey..=NSModeSwitchFunctionKey).contains(&ch)); #[allow(non_upper_case_globals)] let key = match first_char { @@ -427,7 +426,7 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke { key_char = Some(chars_for_modified_key(native_event.keyCode(), mods)); } - let mut key = if shift + if shift && chars_ignoring_modifiers .chars() .all(|c| c.is_ascii_lowercase()) @@ -438,9 +437,7 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke { chars_with_shift } else { chars_ignoring_modifiers - }; - - key + } } }; diff --git a/crates/gpui/src/platform/mac/metal_renderer.rs b/crates/gpui/src/platform/mac/metal_renderer.rs index 629654014d5a15632c5992d9347cab3ee1fd28d9..9e5d6ec5ff02c74b4f0acfada8eee3d002bfd06b 100644 --- a/crates/gpui/src/platform/mac/metal_renderer.rs +++ b/crates/gpui/src/platform/mac/metal_renderer.rs @@ -314,6 +314,15 @@ impl MetalRenderer { } fn update_path_intermediate_textures(&mut self, size: Size) { + // We are uncertain when this happens, but sometimes size can be 0 here. Most likely before + // the layout pass on window creation. Zero-sized texture creation causes SIGABRT. 
+ // https://github.com/zed-industries/zed/issues/36229 + if size.width.0 <= 0 || size.height.0 <= 0 { + self.path_intermediate_texture = None; + self.path_intermediate_msaa_texture = None; + return; + } + let texture_descriptor = metal::TextureDescriptor::new(); texture_descriptor.set_width(size.width.0 as u64); texture_descriptor.set_height(size.height.0 as u64); @@ -323,7 +332,7 @@ impl MetalRenderer { self.path_intermediate_texture = Some(self.device.new_texture(&texture_descriptor)); if self.path_sample_count > 1 { - let mut msaa_descriptor = texture_descriptor.clone(); + let mut msaa_descriptor = texture_descriptor; msaa_descriptor.set_texture_type(metal::MTLTextureType::D2Multisample); msaa_descriptor.set_storage_mode(metal::MTLStorageMode::Private); msaa_descriptor.set_sample_count(self.path_sample_count as _); @@ -436,14 +445,14 @@ impl MetalRenderer { instance_buffer, &mut instance_offset, viewport_size, - &command_encoder, + command_encoder, ), PrimitiveBatch::Quads(quads) => self.draw_quads( quads, instance_buffer, &mut instance_offset, viewport_size, - &command_encoder, + command_encoder, ), PrimitiveBatch::Paths(paths) => { command_encoder.end_encoding(); @@ -471,7 +480,7 @@ impl MetalRenderer { instance_buffer, &mut instance_offset, viewport_size, - &command_encoder, + command_encoder, ) } else { false @@ -482,7 +491,7 @@ impl MetalRenderer { instance_buffer, &mut instance_offset, viewport_size, - &command_encoder, + command_encoder, ), PrimitiveBatch::MonochromeSprites { texture_id, @@ -493,7 +502,7 @@ impl MetalRenderer { instance_buffer, &mut instance_offset, viewport_size, - &command_encoder, + command_encoder, ), PrimitiveBatch::PolychromeSprites { texture_id, @@ -504,14 +513,14 @@ impl MetalRenderer { instance_buffer, &mut instance_offset, viewport_size, - &command_encoder, + command_encoder, ), PrimitiveBatch::Surfaces(surfaces) => self.draw_surfaces( surfaces, instance_buffer, &mut instance_offset, viewport_size, - &command_encoder, + command_encoder, ), }; if !ok { @@ -754,7 +763,7 @@ impl MetalRenderer { viewport_size: Size, command_encoder: &metal::RenderCommandEncoderRef, ) -> bool { - let Some(ref first_path) = paths.first() else { + let Some(first_path) = paths.first() else { return true; }; diff --git a/crates/gpui/src/platform/mac/open_type.rs b/crates/gpui/src/platform/mac/open_type.rs index 2ae5e8f87ab78a70e423a4645c96e69f098828a6..37a29559fdfbc284ffd1021cc6c2c6ed717ca228 100644 --- a/crates/gpui/src/platform/mac/open_type.rs +++ b/crates/gpui/src/platform/mac/open_type.rs @@ -35,14 +35,14 @@ pub fn apply_features_and_fallbacks( unsafe { let mut keys = vec![kCTFontFeatureSettingsAttribute]; let mut values = vec![generate_feature_array(features)]; - if let Some(fallbacks) = fallbacks { - if !fallbacks.fallback_list().is_empty() { - keys.push(kCTFontCascadeListAttribute); - values.push(generate_fallback_array( - fallbacks, - font.native_font().as_concrete_TypeRef(), - )); - } + if let Some(fallbacks) = fallbacks + && !fallbacks.fallback_list().is_empty() + { + keys.push(kCTFontCascadeListAttribute); + values.push(generate_fallback_array( + fallbacks, + font.native_font().as_concrete_TypeRef(), + )); } let attrs = CFDictionaryCreate( kCFAllocatorDefault, diff --git a/crates/gpui/src/platform/mac/platform.rs b/crates/gpui/src/platform/mac/platform.rs index 1d2146cf73562beed6c26754396dc2c4c0c915f9..832550dc46281c56749aa8f6bc4d59a041c9a00d 100644 --- a/crates/gpui/src/platform/mac/platform.rs +++ b/crates/gpui/src/platform/mac/platform.rs @@ -7,9 +7,9 @@ use 
super::{ use crate::{ Action, AnyWindowHandle, BackgroundExecutor, ClipboardEntry, ClipboardItem, ClipboardString, CursorStyle, ForegroundExecutor, Image, ImageFormat, KeyContext, Keymap, MacDispatcher, - MacDisplay, MacWindow, Menu, MenuItem, OwnedMenu, PathPromptOptions, Platform, PlatformDisplay, - PlatformKeyboardLayout, PlatformTextSystem, PlatformWindow, Result, SemanticVersion, Task, - WindowAppearance, WindowParams, hash, + MacDisplay, MacWindow, Menu, MenuItem, OsMenu, OwnedMenu, PathPromptOptions, Platform, + PlatformDisplay, PlatformKeyboardLayout, PlatformTextSystem, PlatformWindow, Result, + SemanticVersion, SystemMenuType, Task, WindowAppearance, WindowParams, hash, }; use anyhow::{Context as _, anyhow}; use block::ConcreteBlock; @@ -47,7 +47,7 @@ use objc::{ use parking_lot::Mutex; use ptr::null_mut; use std::{ - cell::{Cell, LazyCell}, + cell::Cell, convert::TryInto, ffi::{CStr, OsStr, c_void}, os::{raw::c_char, unix::ffi::OsStrExt}, @@ -56,7 +56,7 @@ use std::{ ptr, rc::Rc, slice, str, - sync::Arc, + sync::{Arc, OnceLock}, }; use strum::IntoEnumIterator; use util::ResultExt; @@ -296,18 +296,7 @@ impl MacPlatform { actions: &mut Vec>, keymap: &Keymap, ) -> id { - const DEFAULT_CONTEXT: LazyCell> = LazyCell::new(|| { - let mut workspace_context = KeyContext::new_with_defaults(); - workspace_context.add("Workspace"); - let mut pane_context = KeyContext::new_with_defaults(); - pane_context.add("Pane"); - let mut editor_context = KeyContext::new_with_defaults(); - editor_context.add("Editor"); - - pane_context.extend(&editor_context); - workspace_context.extend(&pane_context); - vec![workspace_context] - }); + static DEFAULT_CONTEXT: OnceLock> = OnceLock::new(); unsafe { match item { @@ -323,9 +312,20 @@ impl MacPlatform { let keystrokes = keymap .bindings_for_action(action.as_ref()) .find_or_first(|binding| { - binding - .predicate() - .is_none_or(|predicate| predicate.eval(&DEFAULT_CONTEXT)) + binding.predicate().is_none_or(|predicate| { + predicate.eval(DEFAULT_CONTEXT.get_or_init(|| { + let mut workspace_context = KeyContext::new_with_defaults(); + workspace_context.add("Workspace"); + let mut pane_context = KeyContext::new_with_defaults(); + pane_context.add("Pane"); + let mut editor_context = KeyContext::new_with_defaults(); + editor_context.add("Editor"); + + pane_context.extend(&editor_context); + workspace_context.extend(&pane_context); + vec![workspace_context] + })) + }) }) .map(|binding| binding.keystrokes()); @@ -371,7 +371,7 @@ impl MacPlatform { item = NSMenuItem::alloc(nil) .initWithTitle_action_keyEquivalent_( - ns_string(&name), + ns_string(name), selector, ns_string(key_to_native(&keystroke.key).as_ref()), ) @@ -383,7 +383,7 @@ impl MacPlatform { } else { item = NSMenuItem::alloc(nil) .initWithTitle_action_keyEquivalent_( - ns_string(&name), + ns_string(name), selector, ns_string(""), ) @@ -392,7 +392,7 @@ impl MacPlatform { } else { item = NSMenuItem::alloc(nil) .initWithTitle_action_keyEquivalent_( - ns_string(&name), + ns_string(name), selector, ns_string(""), ) @@ -412,10 +412,21 @@ impl MacPlatform { submenu.addItem_(Self::create_menu_item(item, delegate, actions, keymap)); } item.setSubmenu_(submenu); - item.setTitle_(ns_string(&name)); - if name == "Services" { - let app: id = msg_send![APP_CLASS, sharedApplication]; - app.setServicesMenu_(item); + item.setTitle_(ns_string(name)); + item + } + MenuItem::SystemMenu(OsMenu { name, menu_type }) => { + let item = NSMenuItem::new(nil).autorelease(); + let submenu = NSMenu::new(nil).autorelease(); + 
submenu.setDelegate_(delegate); + item.setSubmenu_(submenu); + item.setTitle_(ns_string(name)); + + match menu_type { + SystemMenuType::Services => { + let app: id = msg_send![APP_CLASS, sharedApplication]; + app.setServicesMenu_(item); + } } item @@ -694,6 +705,7 @@ impl Platform for MacPlatform { panel.setCanChooseDirectories_(options.directories.to_objc()); panel.setCanChooseFiles_(options.files.to_objc()); panel.setAllowsMultipleSelection_(options.multiple.to_objc()); + panel.setCanCreateDirectories(true.to_objc()); panel.setResolvesAliases_(false.to_objc()); let done_tx = Cell::new(Some(done_tx)); @@ -703,10 +715,10 @@ impl Platform for MacPlatform { let urls = panel.URLs(); for i in 0..urls.count() { let url = urls.objectAtIndex(i); - if url.isFileURL() == YES { - if let Ok(path) = ns_url_to_path(url) { - result.push(path) - } + if url.isFileURL() == YES + && let Ok(path) = ns_url_to_path(url) + { + result.push(path) } } Some(result) @@ -719,6 +731,11 @@ impl Platform for MacPlatform { } }); let block = block.copy(); + + if let Some(prompt) = options.prompt { + let _: () = msg_send![panel, setPrompt: ns_string(&prompt)]; + } + let _: () = msg_send![panel, beginWithCompletionHandler: block]; } }) @@ -726,8 +743,13 @@ impl Platform for MacPlatform { done_rx } - fn prompt_for_new_path(&self, directory: &Path) -> oneshot::Receiver>> { + fn prompt_for_new_path( + &self, + directory: &Path, + suggested_name: Option<&str>, + ) -> oneshot::Receiver>> { let directory = directory.to_owned(); + let suggested_name = suggested_name.map(|s| s.to_owned()); let (done_tx, done_rx) = oneshot::channel(); self.foreground_executor() .spawn(async move { @@ -737,6 +759,11 @@ impl Platform for MacPlatform { let url = NSURL::fileURLWithPath_isDirectory_(nil, path, true.to_objc()); panel.setDirectoryURL(url); + if let Some(suggested_name) = suggested_name { + let name_string = ns_string(&suggested_name); + let _: () = msg_send![panel, setNameFieldStringValue: name_string]; + } + let done_tx = Cell::new(Some(done_tx)); let block = ConcreteBlock::new(move |response: NSModalResponse| { let mut result = None; @@ -759,17 +786,18 @@ impl Platform for MacPlatform { // This is conditional on OS version because I'd like to get rid of it, so that // you can manually create a file called `a.sql.s`. That said it seems better // to break that use-case than breaking `a.sql`. 
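The comment above refers to the chunk-based trim applied to the name returned by the save panel. A minimal sketch of that logic, assuming `chunks` comes from splitting the file name on `.` (illustrative helper only, not the `MacPlatform` code, which works on `OsStr` bytes and is gated to macOS 15 and later):

```rust
// Sketch of the trailing-extension trim discussed above: when the name splits
// into exactly three chunks and the middle chunk starts with the last one,
// drop the last chunk. "a.sql.s" and "a.sql.sql" become "a.sql"; "a.tar.gz"
// is left alone.
fn trim_duplicated_extension(file_name: &str) -> String {
    let chunks: Vec<&str> = file_name.split('.').collect();
    if chunks.len() == 3 && chunks[1].starts_with(chunks[2]) {
        format!("{}.{}", chunks[0], chunks[1])
    } else {
        file_name.to_string()
    }
}

fn main() {
    assert_eq!(trim_duplicated_extension("a.sql.s"), "a.sql");
    assert_eq!(trim_duplicated_extension("a.sql.sql"), "a.sql");
    assert_eq!(trim_duplicated_extension("a.tar.gz"), "a.tar.gz");
}
```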
- if chunks.len() == 3 && chunks[1].starts_with(chunks[2]) { - if Self::os_version() >= SemanticVersion::new(15, 0, 0) { - let new_filename = OsStr::from_bytes( - &filename.as_bytes() - [..chunks[0].len() + 1 + chunks[1].len()], - ) - .to_owned(); - result.set_file_name(&new_filename); - } + if chunks.len() == 3 + && chunks[1].starts_with(chunks[2]) + && Self::os_version() >= SemanticVersion::new(15, 0, 0) + { + let new_filename = OsStr::from_bytes( + &filename.as_bytes() + [..chunks[0].len() + 1 + chunks[1].len()], + ) + .to_owned(); + result.set_file_name(&new_filename); } - return result; + result }) } } diff --git a/crates/gpui/src/platform/mac/shaders.metal b/crates/gpui/src/platform/mac/shaders.metal index f9d5bdbf4c4ae1fa6ce098463ce63701a7019bbc..83c978b853443d5c612f514625f94b6d6725be8a 100644 --- a/crates/gpui/src/platform/mac/shaders.metal +++ b/crates/gpui/src/platform/mac/shaders.metal @@ -567,15 +567,20 @@ vertex UnderlineVertexOutput underline_vertex( fragment float4 underline_fragment(UnderlineFragmentInput input [[stage_in]], constant Underline *underlines [[buffer(UnderlineInputIndex_Underlines)]]) { + const float WAVE_FREQUENCY = 2.0; + const float WAVE_HEIGHT_RATIO = 0.8; + Underline underline = underlines[input.underline_id]; if (underline.wavy) { float half_thickness = underline.thickness * 0.5; float2 origin = float2(underline.bounds.origin.x, underline.bounds.origin.y); + float2 st = ((input.position.xy - origin) / underline.bounds.size.height) - float2(0., 0.5); - float frequency = (M_PI_F * (3. * underline.thickness)) / 8.; - float amplitude = 1. / (2. * underline.thickness); + float frequency = (M_PI_F * WAVE_FREQUENCY * underline.thickness) / underline.bounds.size.height; + float amplitude = (underline.thickness * WAVE_HEIGHT_RATIO) / underline.bounds.size.height; + float sine = sin(st.x * frequency) * amplitude; float dSine = cos(st.x * frequency) * amplitude * frequency; float distance = (st.y - sine) / sqrt(1. 
+ dSine * dSine); diff --git a/crates/gpui/src/platform/mac/text_system.rs b/crates/gpui/src/platform/mac/text_system.rs index c45888bce7172db0ec940d78549937e77657d6e3..72a0f2e565d9937e3aaf4082b663c3e2ae6ac91d 100644 --- a/crates/gpui/src/platform/mac/text_system.rs +++ b/crates/gpui/src/platform/mac/text_system.rs @@ -211,11 +211,7 @@ impl MacTextSystemState { features: &FontFeatures, fallbacks: Option<&FontFallbacks>, ) -> Result> { - let name = if name == ".SystemUIFont" { - ".AppleSystemUIFont" - } else { - name - }; + let name = crate::text_system::font_name_with_fallbacks(name, ".AppleSystemUIFont"); let mut font_ids = SmallVec::new(); let family = self @@ -323,7 +319,7 @@ impl MacTextSystemState { fn is_emoji(&self, font_id: FontId) -> bool { self.postscript_names_by_font_id .get(&font_id) - .map_or(false, |postscript_name| { + .is_some_and(|postscript_name| { postscript_name == "AppleColorEmoji" || postscript_name == ".AppleColorEmojiUI" }) } diff --git a/crates/gpui/src/platform/mac/window.rs b/crates/gpui/src/platform/mac/window.rs index aedf131909a6956e9a4501b107c81ce242b80a49..4425d4fe24c91a0bcf840b59de139ecf4f8187b0 100644 --- a/crates/gpui/src/platform/mac/window.rs +++ b/crates/gpui/src/platform/mac/window.rs @@ -653,7 +653,7 @@ impl MacWindow { .and_then(|titlebar| titlebar.traffic_light_position), transparent_titlebar: titlebar .as_ref() - .map_or(true, |titlebar| titlebar.appears_transparent), + .is_none_or(|titlebar| titlebar.appears_transparent), previous_modifiers_changed_event: None, keystroke_for_do_command: None, do_command_handled: None, @@ -688,7 +688,7 @@ impl MacWindow { }); } - if titlebar.map_or(true, |titlebar| titlebar.appears_transparent) { + if titlebar.is_none_or(|titlebar| titlebar.appears_transparent) { native_window.setTitlebarAppearsTransparent_(YES); native_window.setTitleVisibility_(NSWindowTitleVisibility::NSWindowTitleHidden); } @@ -1090,7 +1090,7 @@ impl PlatformWindow for MacWindow { NSView::removeFromSuperview(blur_view); this.blurred_view = None; } - } else if this.blurred_view == None { + } else if this.blurred_view.is_none() { let content_view = this.native_window.contentView(); let frame = NSView::bounds(content_view); let mut blur_view: id = msg_send![BLURRED_VIEW_CLASS, alloc]; @@ -1478,18 +1478,18 @@ extern "C" fn handle_key_event(this: &Object, native_event: id, key_equivalent: return YES; } - if key_down_event.is_held { - if let Some(key_char) = key_down_event.keystroke.key_char.as_ref() { - let handled = with_input_handler(&this, |input_handler| { - if !input_handler.apple_press_and_hold_enabled() { - input_handler.replace_text_in_range(None, &key_char); - return YES; - } - NO - }); - if handled == Some(YES) { + if key_down_event.is_held + && let Some(key_char) = key_down_event.keystroke.key_char.as_ref() + { + let handled = with_input_handler(this, |input_handler| { + if !input_handler.apple_press_and_hold_enabled() { + input_handler.replace_text_in_range(None, key_char); return YES; } + NO + }); + if handled == Some(YES) { + return YES; } } @@ -1624,10 +1624,10 @@ extern "C" fn handle_view_event(this: &Object, _: Sel, native_event: id) { modifiers: prev_modifiers, capslock: prev_capslock, })) = &lock.previous_modifiers_changed_event + && prev_modifiers == modifiers + && prev_capslock == capslock { - if prev_modifiers == modifiers && prev_capslock == capslock { - return; - } + return; } lock.previous_modifiers_changed_event = Some(event.clone()); @@ -1949,7 +1949,7 @@ extern "C" fn insert_text(this: &Object, _: Sel, text: id, 
replacement_range: NS let text = text.to_str(); let replacement_range = replacement_range.to_range(); with_input_handler(this, |input_handler| { - input_handler.replace_text_in_range(replacement_range, &text) + input_handler.replace_text_in_range(replacement_range, text) }); } } @@ -1973,7 +1973,7 @@ extern "C" fn set_marked_text( let replacement_range = replacement_range.to_range(); let text = text.to_str(); with_input_handler(this, |input_handler| { - input_handler.replace_and_mark_text_in_range(replacement_range, &text, selected_range) + input_handler.replace_and_mark_text_in_range(replacement_range, text, selected_range) }); } } @@ -1995,10 +1995,10 @@ extern "C" fn attributed_substring_for_proposed_range( let mut adjusted: Option> = None; let selected_text = input_handler.text_for_range(range.clone(), &mut adjusted)?; - if let Some(adjusted) = adjusted { - if adjusted != range { - unsafe { (actual_range as *mut NSRange).write(NSRange::from(adjusted)) }; - } + if let Some(adjusted) = adjusted + && adjusted != range + { + unsafe { (actual_range as *mut NSRange).write(NSRange::from(adjusted)) }; } unsafe { let string: id = msg_send![class!(NSAttributedString), alloc]; @@ -2063,8 +2063,8 @@ fn screen_point_to_gpui_point(this: &Object, position: NSPoint) -> Point let frame = get_frame(this); let window_x = position.x - frame.origin.x; let window_y = frame.size.height - (position.y - frame.origin.y); - let position = point(px(window_x as f32), px(window_y as f32)); - position + + point(px(window_x as f32), px(window_y as f32)) } extern "C" fn dragging_entered(this: &Object, _: Sel, dragging_info: id) -> NSDragOperation { @@ -2073,11 +2073,10 @@ extern "C" fn dragging_entered(this: &Object, _: Sel, dragging_info: id) -> NSDr let paths = external_paths_from_event(dragging_info); if let Some(event) = paths.map(|paths| PlatformInput::FileDrop(FileDropEvent::Entered { position, paths })) + && send_new_event(&window_state, event) { - if send_new_event(&window_state, event) { - window_state.lock().external_files_dragged = true; - return NSDragOperationCopy; - } + window_state.lock().external_files_dragged = true; + return NSDragOperationCopy; } NSDragOperationNone } diff --git a/crates/gpui/src/platform/scap_screen_capture.rs b/crates/gpui/src/platform/scap_screen_capture.rs index 32041b655fdc20b046717291c623dcb5c4d5146c..d6d19cd8102d58ceaa9bc87bff348eaeda9adfef 100644 --- a/crates/gpui/src/platform/scap_screen_capture.rs +++ b/crates/gpui/src/platform/scap_screen_capture.rs @@ -228,7 +228,7 @@ fn run_capture( display, size, })); - if let Err(_) = stream_send_result { + if stream_send_result.is_err() { return; } while !cancel_stream.load(std::sync::atomic::Ordering::SeqCst) { diff --git a/crates/gpui/src/platform/test/dispatcher.rs b/crates/gpui/src/platform/test/dispatcher.rs index 16edabfa4bfee9c66dcf6ed8abc5eeb7957a7fa0..4ce62c4bdcae60d517dd88501cb89af8fee2c9bc 100644 --- a/crates/gpui/src/platform/test/dispatcher.rs +++ b/crates/gpui/src/platform/test/dispatcher.rs @@ -78,11 +78,11 @@ impl TestDispatcher { let state = self.state.lock(); let next_due_time = state.delayed.first().map(|(time, _)| *time); drop(state); - if let Some(due_time) = next_due_time { - if due_time <= new_now { - self.state.lock().time = due_time; - continue; - } + if let Some(due_time) = next_due_time + && due_time <= new_now + { + self.state.lock().time = due_time; + continue; } break; } @@ -270,9 +270,7 @@ impl PlatformDispatcher for TestDispatcher { fn dispatch(&self, runnable: Runnable, label: Option) { { let mut 
state = self.state.lock(); - if label.map_or(false, |label| { - state.deprioritized_task_labels.contains(&label) - }) { + if label.is_some_and(|label| state.deprioritized_task_labels.contains(&label)) { state.deprioritized_background.push(runnable); } else { state.background.push(runnable); diff --git a/crates/gpui/src/platform/test/platform.rs b/crates/gpui/src/platform/test/platform.rs index a26b65576cc49e290494762eed597d5bd8d0af26..00afcd81b599cc53f12005062e4b87abd9c30e38 100644 --- a/crates/gpui/src/platform/test/platform.rs +++ b/crates/gpui/src/platform/test/platform.rs @@ -187,24 +187,24 @@ impl TestPlatform { .push_back(TestPrompt { msg: msg.to_string(), detail: detail.map(|s| s.to_string()), - answers: answers.clone(), + answers, tx, }); rx } pub(crate) fn set_active_window(&self, window: Option) { - let executor = self.foreground_executor().clone(); + let executor = self.foreground_executor(); let previous_window = self.active_window.borrow_mut().take(); self.active_window.borrow_mut().clone_from(&window); executor .spawn(async move { if let Some(previous_window) = previous_window { - if let Some(window) = window.as_ref() { - if Rc::ptr_eq(&previous_window.0, &window.0) { - return; - } + if let Some(window) = window.as_ref() + && Rc::ptr_eq(&previous_window.0, &window.0) + { + return; } previous_window.simulate_active_status_change(false); } @@ -336,6 +336,7 @@ impl Platform for TestPlatform { fn prompt_for_new_path( &self, directory: &std::path::Path, + _suggested_name: Option<&str>, ) -> oneshot::Receiver>> { let (tx, rx) = oneshot::channel(); self.background_executor() diff --git a/crates/gpui/src/platform/windows.rs b/crates/gpui/src/platform/windows.rs index 5268d3ccba217c996f1ab2f3f664bac2c1032627..77e0ca41bf8b394dc8bdd75e521aab3ba63dce2c 100644 --- a/crates/gpui/src/platform/windows.rs +++ b/crates/gpui/src/platform/windows.rs @@ -10,6 +10,7 @@ mod keyboard; mod platform; mod system_settings; mod util; +mod vsync; mod window; mod wrapper; @@ -25,6 +26,7 @@ pub(crate) use keyboard::*; pub(crate) use platform::*; pub(crate) use system_settings::*; pub(crate) use util::*; +pub(crate) use vsync::*; pub(crate) use window::*; pub(crate) use wrapper::*; diff --git a/crates/gpui/src/platform/windows/direct_write.rs b/crates/gpui/src/platform/windows/direct_write.rs index 587cb7b4a6c18aca2ee4f8a10e453eeca3ade037..a86a1fab62a404c4f49e785491bb2925a6f3cf61 100644 --- a/crates/gpui/src/platform/windows/direct_write.rs +++ b/crates/gpui/src/platform/windows/direct_write.rs @@ -498,8 +498,9 @@ impl DirectWriteState { ) .unwrap() } else { + let family = self.system_ui_font_name.clone(); self.find_font_id( - target_font.family.as_ref(), + font_name_with_fallbacks(target_font.family.as_ref(), family.as_ref()), target_font.weight, target_font.style, &target_font.features, @@ -512,7 +513,6 @@ impl DirectWriteState { } #[cfg(not(any(test, feature = "test-support")))] { - let family = self.system_ui_font_name.clone(); log::error!("{} not found, use {} instead.", target_font.family, family); self.get_font_id_from_font_collection( family.as_ref(), @@ -850,7 +850,7 @@ impl DirectWriteState { } let bitmap_data = if params.is_emoji { - if let Ok(color) = self.rasterize_color(¶ms, glyph_bounds) { + if let Ok(color) = self.rasterize_color(params, glyph_bounds) { color } else { let monochrome = self.rasterize_monochrome(params, glyph_bounds)?; @@ -1784,7 +1784,7 @@ fn apply_font_features( } unsafe { - direct_write_features.AddFontFeature(make_direct_write_feature(&tag, *value))?; + 
direct_write_features.AddFontFeature(make_direct_write_feature(tag, *value))?; } } unsafe { diff --git a/crates/gpui/src/platform/windows/directx_renderer.rs b/crates/gpui/src/platform/windows/directx_renderer.rs index ac285b79acee82571456943d2f660947687b56c3..f84a1c1b6d0d158684e4c6cad6edbf72105425e0 100644 --- a/crates/gpui/src/platform/windows/directx_renderer.rs +++ b/crates/gpui/src/platform/windows/directx_renderer.rs @@ -207,7 +207,7 @@ impl DirectXRenderer { fn present(&mut self) -> Result<()> { unsafe { - let result = self.resources.swap_chain.Present(1, DXGI_PRESENT(0)); + let result = self.resources.swap_chain.Present(0, DXGI_PRESENT(0)); // Presenting the swap chain can fail if the DirectX device was removed or reset. if result == DXGI_ERROR_DEVICE_REMOVED || result == DXGI_ERROR_DEVICE_RESET { let reason = self.devices.device.GetDeviceRemovedReason(); @@ -758,7 +758,7 @@ impl DirectXRenderPipelines { impl DirectComposition { pub fn new(dxgi_device: &IDXGIDevice, hwnd: HWND) -> Result { - let comp_device = get_comp_device(&dxgi_device)?; + let comp_device = get_comp_device(dxgi_device)?; let comp_target = unsafe { comp_device.CreateTargetForHwnd(hwnd, true) }?; let comp_visual = unsafe { comp_device.CreateVisual() }?; @@ -1144,7 +1144,7 @@ fn create_resources( [D3D11_VIEWPORT; 1], )> { let (render_target, render_target_view) = - create_render_target_and_its_view(&swap_chain, &devices.device)?; + create_render_target_and_its_view(swap_chain, &devices.device)?; let (path_intermediate_texture, path_intermediate_srv) = create_path_intermediate_texture(&devices.device, width, height)?; let (path_intermediate_msaa_texture, path_intermediate_msaa_view) = @@ -1624,11 +1624,10 @@ mod nvidia { os::raw::{c_char, c_int, c_uint}, }; - use anyhow::{Context, Result}; - use windows::{ - Win32::System::LibraryLoader::{GetProcAddress, LoadLibraryA}, - core::s, - }; + use anyhow::Result; + use windows::{Win32::System::LibraryLoader::GetProcAddress, core::s}; + + use crate::with_dll_library; // https://github.com/NVIDIA/nvapi/blob/7cb76fce2f52de818b3da497af646af1ec16ce27/nvapi_lite_common.h#L180 const NVAPI_SHORT_STRING_MAX: usize = 64; @@ -1645,13 +1644,12 @@ mod nvidia { ) -> c_int; pub(super) fn get_driver_version() -> Result { - unsafe { - // Try to load the NVIDIA driver DLL - #[cfg(target_pointer_width = "64")] - let nvidia_dll = LoadLibraryA(s!("nvapi64.dll")).context("Can't load nvapi64.dll")?; - #[cfg(target_pointer_width = "32")] - let nvidia_dll = LoadLibraryA(s!("nvapi.dll")).context("Can't load nvapi.dll")?; + #[cfg(target_pointer_width = "64")] + let nvidia_dll_name = s!("nvapi64.dll"); + #[cfg(target_pointer_width = "32")] + let nvidia_dll_name = s!("nvapi.dll"); + with_dll_library(nvidia_dll_name, |nvidia_dll| unsafe { let nvapi_query_addr = GetProcAddress(nvidia_dll, s!("nvapi_QueryInterface")) .ok_or_else(|| anyhow::anyhow!("Failed to get nvapi_QueryInterface address"))?; let nvapi_query: extern "C" fn(u32) -> *mut () = std::mem::transmute(nvapi_query_addr); @@ -1686,18 +1684,17 @@ mod nvidia { minor, branch_string.to_string_lossy() )) - } + }) } } mod amd { use std::os::raw::{c_char, c_int, c_void}; - use anyhow::{Context, Result}; - use windows::{ - Win32::System::LibraryLoader::{GetProcAddress, LoadLibraryA}, - core::s, - }; + use anyhow::Result; + use windows::{Win32::System::LibraryLoader::GetProcAddress, core::s}; + + use crate::with_dll_library; // https://github.com/GPUOpen-LibrariesAndSDKs/AGS_SDK/blob/5d8812d703d0335741b6f7ffc37838eeb8b967f7/ags_lib/inc/amd_ags.h#L145 
const AGS_CURRENT_VERSION: i32 = (6 << 22) | (3 << 12); @@ -1731,14 +1728,12 @@ mod amd { type agsDeInitialize_t = unsafe extern "C" fn(context: *mut AGSContext) -> c_int; pub(super) fn get_driver_version() -> Result { - unsafe { - #[cfg(target_pointer_width = "64")] - let amd_dll = - LoadLibraryA(s!("amd_ags_x64.dll")).context("Failed to load AMD AGS library")?; - #[cfg(target_pointer_width = "32")] - let amd_dll = - LoadLibraryA(s!("amd_ags_x86.dll")).context("Failed to load AMD AGS library")?; + #[cfg(target_pointer_width = "64")] + let amd_dll_name = s!("amd_ags_x64.dll"); + #[cfg(target_pointer_width = "32")] + let amd_dll_name = s!("amd_ags_x86.dll"); + with_dll_library(amd_dll_name, |amd_dll| unsafe { let ags_initialize_addr = GetProcAddress(amd_dll, s!("agsInitialize")) .ok_or_else(|| anyhow::anyhow!("Failed to get agsInitialize address"))?; let ags_deinitialize_addr = GetProcAddress(amd_dll, s!("agsDeInitialize")) @@ -1784,7 +1779,7 @@ mod amd { ags_deinitialize(context); Ok(format!("{} ({})", software_version, driver_version)) - } + }) } } diff --git a/crates/gpui/src/platform/windows/events.rs b/crates/gpui/src/platform/windows/events.rs index 4ab257d27a69fc5fed458655150e1c09c3ebbba8..4def6a11a5f16f235b1d7018ecbbdec5565ab951 100644 --- a/crates/gpui/src/platform/windows/events.rs +++ b/crates/gpui/src/platform/windows/events.rs @@ -100,6 +100,7 @@ impl WindowsWindowInner { WM_SETCURSOR => self.handle_set_cursor(handle, lparam), WM_SETTINGCHANGE => self.handle_system_settings_changed(handle, wparam, lparam), WM_INPUTLANGCHANGE => self.handle_input_language_changed(lparam), + WM_SHOWWINDOW => self.handle_window_visibility_changed(handle, wparam), WM_GPUI_CURSOR_STYLE_CHANGED => self.handle_cursor_changed(lparam), WM_GPUI_FORCE_UPDATE_WINDOW => self.draw_window(handle, true), _ => None, @@ -700,29 +701,28 @@ impl WindowsWindowInner { // Fix auto hide taskbar not showing. This solution is based on the approach // used by Chrome. However, it may result in one row of pixels being obscured // in our client area. But as Chrome says, "there seems to be no better solution." - if is_maximized { - if let Some(ref taskbar_position) = self + if is_maximized + && let Some(ref taskbar_position) = self .state .borrow() .system_settings .auto_hide_taskbar_position - { - // Fot the auto-hide taskbar, adjust in by 1 pixel on taskbar edge, - // so the window isn't treated as a "fullscreen app", which would cause - // the taskbar to disappear. - match taskbar_position { - AutoHideTaskbarPosition::Left => { - requested_client_rect[0].left += AUTO_HIDE_TASKBAR_THICKNESS_PX - } - AutoHideTaskbarPosition::Top => { - requested_client_rect[0].top += AUTO_HIDE_TASKBAR_THICKNESS_PX - } - AutoHideTaskbarPosition::Right => { - requested_client_rect[0].right -= AUTO_HIDE_TASKBAR_THICKNESS_PX - } - AutoHideTaskbarPosition::Bottom => { - requested_client_rect[0].bottom -= AUTO_HIDE_TASKBAR_THICKNESS_PX - } + { + // Fot the auto-hide taskbar, adjust in by 1 pixel on taskbar edge, + // so the window isn't treated as a "fullscreen app", which would cause + // the taskbar to disappear. 
+ match taskbar_position { + AutoHideTaskbarPosition::Left => { + requested_client_rect[0].left += AUTO_HIDE_TASKBAR_THICKNESS_PX + } + AutoHideTaskbarPosition::Top => { + requested_client_rect[0].top += AUTO_HIDE_TASKBAR_THICKNESS_PX + } + AutoHideTaskbarPosition::Right => { + requested_client_rect[0].right -= AUTO_HIDE_TASKBAR_THICKNESS_PX + } + AutoHideTaskbarPosition::Bottom => { + requested_client_rect[0].bottom -= AUTO_HIDE_TASKBAR_THICKNESS_PX } } } @@ -956,7 +956,7 @@ impl WindowsWindowInner { click_count, first_mouse: false, }); - let result = func(input.clone()); + let result = func(input); let handled = !result.propagate || result.default_prevented; self.state.borrow_mut().callbacks.input = Some(func); @@ -1124,27 +1124,22 @@ impl WindowsWindowInner { // lParam is a pointer to a string that indicates the area containing the system parameter // that was changed. let parameter = PCWSTR::from_raw(lparam.0 as _); - if unsafe { !parameter.is_null() && !parameter.is_empty() } { - if let Some(parameter_string) = unsafe { parameter.to_string() }.log_err() { - log::info!("System settings changed: {}", parameter_string); - match parameter_string.as_str() { - "ImmersiveColorSet" => { - let new_appearance = system_appearance() - .context( - "unable to get system appearance when handling ImmersiveColorSet", - ) - .log_err()?; - let mut lock = self.state.borrow_mut(); - if new_appearance != lock.appearance { - lock.appearance = new_appearance; - let mut callback = lock.callbacks.appearance_changed.take()?; - drop(lock); - callback(); - self.state.borrow_mut().callbacks.appearance_changed = Some(callback); - configure_dwm_dark_mode(handle, new_appearance); - } - } - _ => {} + if unsafe { !parameter.is_null() && !parameter.is_empty() } + && let Some(parameter_string) = unsafe { parameter.to_string() }.log_err() + { + log::info!("System settings changed: {}", parameter_string); + if parameter_string.as_str() == "ImmersiveColorSet" { + let new_appearance = system_appearance() + .context("unable to get system appearance when handling ImmersiveColorSet") + .log_err()?; + let mut lock = self.state.borrow_mut(); + if new_appearance != lock.appearance { + lock.appearance = new_appearance; + let mut callback = lock.callbacks.appearance_changed.take()?; + drop(lock); + callback(); + self.state.borrow_mut().callbacks.appearance_changed = Some(callback); + configure_dwm_dark_mode(handle, new_appearance); } } } @@ -1160,6 +1155,13 @@ impl WindowsWindowInner { Some(0) } + fn handle_window_visibility_changed(&self, handle: HWND, wparam: WPARAM) -> Option { + if wparam.0 == 1 { + self.draw_window(handle, false); + } + None + } + fn handle_device_change_msg(&self, handle: HWND, wparam: WPARAM) -> Option { if wparam.0 == DBT_DEVNODES_CHANGED as usize { // The reason for sending this message is to actually trigger a redraw of the window. 
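The auto-hide-taskbar comment in the hunk above boils down to shrinking the maximized client rect by one pixel on the edge that hosts the taskbar, so Windows still reveals the hidden taskbar instead of treating the window as a fullscreen app. An illustrative sketch on a plain rect type rather than the Win32 `RECT` (the enum and constant mirror the names used in this hunk):

```rust
// Illustrative sketch of the 1 px auto-hide-taskbar adjustment described above.
const AUTO_HIDE_TASKBAR_THICKNESS_PX: i32 = 1;

#[allow(dead_code)]
#[derive(Clone, Copy)]
enum AutoHideTaskbarPosition { Left, Top, Right, Bottom }

#[derive(Debug, PartialEq)]
struct Rect { left: i32, top: i32, right: i32, bottom: i32 }

// Shrink the maximized client rect by one pixel on the edge that hosts an
// auto-hide taskbar so its reveal strip keeps working.
fn inset_for_auto_hide_taskbar(rect: &mut Rect, position: AutoHideTaskbarPosition) {
    match position {
        AutoHideTaskbarPosition::Left => rect.left += AUTO_HIDE_TASKBAR_THICKNESS_PX,
        AutoHideTaskbarPosition::Top => rect.top += AUTO_HIDE_TASKBAR_THICKNESS_PX,
        AutoHideTaskbarPosition::Right => rect.right -= AUTO_HIDE_TASKBAR_THICKNESS_PX,
        AutoHideTaskbarPosition::Bottom => rect.bottom -= AUTO_HIDE_TASKBAR_THICKNESS_PX,
    }
}

fn main() {
    let mut rect = Rect { left: 0, top: 0, right: 1920, bottom: 1080 };
    inset_for_auto_hide_taskbar(&mut rect, AutoHideTaskbarPosition::Bottom);
    assert_eq!(rect, Rect { left: 0, top: 0, right: 1920, bottom: 1079 });
}
```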
@@ -1464,7 +1466,7 @@ pub(crate) fn current_modifiers() -> Modifiers { #[inline] pub(crate) fn current_capslock() -> Capslock { let on = unsafe { GetKeyState(VK_CAPITAL.0 as i32) & 1 } > 0; - Capslock { on: on } + Capslock { on } } fn get_client_area_insets( diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index 01b043a755240868699ad7872b4e46202b5bd90d..6202e05fb3b26f10ba8fdf365a185dccbc6ae2ed 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -1,5 +1,6 @@ use std::{ cell::RefCell, + ffi::OsStr, mem::ManuallyDrop, path::{Path, PathBuf}, rc::Rc, @@ -32,7 +33,7 @@ use crate::*; pub(crate) struct WindowsPlatform { state: RefCell, - raw_window_handles: RwLock>, + raw_window_handles: Arc>>, // The below members will never change throughout the entire lifecycle of the app. icon: HICON, main_receiver: flume::Receiver, @@ -114,7 +115,7 @@ impl WindowsPlatform { }; let icon = load_icon().unwrap_or_default(); let state = RefCell::new(WindowsPlatformState::new()); - let raw_window_handles = RwLock::new(SmallVec::new()); + let raw_window_handles = Arc::new(RwLock::new(SmallVec::new())); let windows_version = WindowsVersion::new().context("Error retrieve windows version")?; Ok(Self { @@ -134,22 +135,12 @@ impl WindowsPlatform { }) } - fn redraw_all(&self) { - for handle in self.raw_window_handles.read().iter() { - unsafe { - RedrawWindow(Some(*handle), None, None, RDW_INVALIDATE | RDW_UPDATENOW) - .ok() - .log_err(); - } - } - } - pub fn window_from_hwnd(&self, hwnd: HWND) -> Option> { self.raw_window_handles .read() .iter() - .find(|entry| *entry == &hwnd) - .and_then(|hwnd| window_from_hwnd(*hwnd)) + .find(|entry| entry.as_raw() == hwnd) + .and_then(|hwnd| window_from_hwnd(hwnd.as_raw())) } #[inline] @@ -158,7 +149,7 @@ impl WindowsPlatform { .read() .iter() .for_each(|handle| unsafe { - PostMessageW(Some(*handle), message, wparam, lparam).log_err(); + PostMessageW(Some(handle.as_raw()), message, wparam, lparam).log_err(); }); } @@ -166,7 +157,7 @@ impl WindowsPlatform { let mut lock = self.raw_window_handles.write(); let index = lock .iter() - .position(|handle| *handle == target_window) + .position(|handle| handle.as_raw() == target_window) .unwrap(); lock.remove(index); @@ -226,19 +217,19 @@ impl WindowsPlatform { } } - // Returns true if the app should quit. - fn handle_events(&self) -> bool { + // Returns if the app should quit. + fn handle_events(&self) { let mut msg = MSG::default(); unsafe { - while PeekMessageW(&mut msg, None, 0, 0, PM_REMOVE).as_bool() { + while GetMessageW(&mut msg, None, 0, 0).as_bool() { match msg.message { - WM_QUIT => return true, + WM_QUIT => return, WM_INPUTLANGCHANGE | WM_GPUI_CLOSE_ONE_WINDOW | WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD | WM_GPUI_DOCK_MENU_ACTION => { - if self.handle_gpui_evnets(msg.message, msg.wParam, msg.lParam, &msg) { - return true; + if self.handle_gpui_events(msg.message, msg.wParam, msg.lParam, &msg) { + return; } } _ => { @@ -247,11 +238,10 @@ impl WindowsPlatform { } } } - false } // Returns true if the app should quit. 
- fn handle_gpui_evnets( + fn handle_gpui_events( &self, message: u32, wparam: WPARAM, @@ -315,8 +305,28 @@ impl WindowsPlatform { self.raw_window_handles .read() .iter() - .find(|&&hwnd| hwnd == active_window_hwnd) - .copied() + .find(|hwnd| hwnd.as_raw() == active_window_hwnd) + .map(|hwnd| hwnd.as_raw()) + } + + fn begin_vsync_thread(&self) { + let all_windows = Arc::downgrade(&self.raw_window_handles); + std::thread::spawn(move || { + let vsync_provider = VSyncProvider::new(); + loop { + vsync_provider.wait_for_vsync(); + let Some(all_windows) = all_windows.upgrade() else { + break; + }; + for hwnd in all_windows.read().iter() { + unsafe { + RedrawWindow(Some(hwnd.as_raw()), None, None, RDW_INVALIDATE) + .ok() + .log_err(); + } + } + } + }); } } @@ -347,12 +357,8 @@ impl Platform for WindowsPlatform { fn run(&self, on_finish_launching: Box) { on_finish_launching(); - loop { - if self.handle_events() { - break; - } - self.redraw_all(); - } + self.begin_vsync_thread(); + self.handle_events(); if let Some(ref mut callback) = self.state.borrow_mut().callbacks.quit { callback(); @@ -365,9 +371,9 @@ impl Platform for WindowsPlatform { .detach(); } - fn restart(&self, _: Option) { + fn restart(&self, binary_path: Option) { let pid = std::process::id(); - let Some(app_path) = self.app_path().log_err() else { + let Some(app_path) = binary_path.or(self.app_path().log_err()) else { return; }; let script = format!( @@ -445,7 +451,7 @@ impl Platform for WindowsPlatform { ) -> Result> { let window = WindowsWindow::new(handle, options, self.generate_creation_info())?; let handle = window.get_raw_handle(); - self.raw_window_handles.write().push(handle); + self.raw_window_handles.write().push(handle.into()); Ok(Box::new(window)) } @@ -455,13 +461,15 @@ impl Platform for WindowsPlatform { } fn open_url(&self, url: &str) { + if url.is_empty() { + return; + } let url_string = url.to_string(); self.background_executor() .spawn(async move { - if url_string.is_empty() { - return; - } - open_target(url_string.as_str()); + open_target(&url_string) + .with_context(|| format!("Opening url: {}", url_string)) + .log_err(); }) .detach(); } @@ -485,13 +493,18 @@ impl Platform for WindowsPlatform { rx } - fn prompt_for_new_path(&self, directory: &Path) -> Receiver>> { + fn prompt_for_new_path( + &self, + directory: &Path, + suggested_name: Option<&str>, + ) -> Receiver>> { let directory = directory.to_owned(); + let suggested_name = suggested_name.map(|s| s.to_owned()); let (tx, rx) = oneshot::channel(); let window = self.find_current_active_window(); self.foreground_executor() .spawn(async move { - let _ = tx.send(file_save_dialog(directory, window)); + let _ = tx.send(file_save_dialog(directory, suggested_name, window)); }) .detach(); @@ -504,37 +517,29 @@ impl Platform for WindowsPlatform { } fn reveal_path(&self, path: &Path) { - let Ok(file_full_path) = path.canonicalize() else { - log::error!("unable to parse file path"); + if path.as_os_str().is_empty() { return; - }; + } + let path = path.to_path_buf(); self.background_executor() .spawn(async move { - let Some(path) = file_full_path.to_str() else { - return; - }; - if path.is_empty() { - return; - } - open_target_in_explorer(path); + open_target_in_explorer(&path) + .with_context(|| format!("Revealing path {} in explorer", path.display())) + .log_err(); }) .detach(); } fn open_with_system(&self, path: &Path) { - let Ok(full_path) = path.canonicalize() else { - log::error!("unable to parse file full path: {}", path.display()); + if path.as_os_str().is_empty() 
{ return; - }; + } + let path = path.to_path_buf(); self.background_executor() .spawn(async move { - let Some(full_path_str) = full_path.to_str() else { - return; - }; - if full_path_str.is_empty() { - return; - }; - open_target(full_path_str); + open_target(&path) + .with_context(|| format!("Opening {} with system", path.display())) + .log_err(); }) .detach(); } @@ -725,39 +730,67 @@ pub(crate) struct WindowCreationInfo { pub(crate) disable_direct_composition: bool, } -fn open_target(target: &str) { - unsafe { - let ret = ShellExecuteW( +fn open_target(target: impl AsRef) -> Result<()> { + let target = target.as_ref(); + let ret = unsafe { + ShellExecuteW( None, windows::core::w!("open"), &HSTRING::from(target), None, None, SW_SHOWDEFAULT, - ); - if ret.0 as isize <= 32 { - log::error!("Unable to open target: {}", std::io::Error::last_os_error()); - } + ) + }; + if ret.0 as isize <= 32 { + Err(anyhow::anyhow!( + "Unable to open target: {}", + std::io::Error::last_os_error() + )) + } else { + Ok(()) } } -fn open_target_in_explorer(target: &str) { +fn open_target_in_explorer(target: &Path) -> Result<()> { + let dir = target.parent().context("No parent folder found")?; + let desktop = unsafe { SHGetDesktopFolder()? }; + + let mut dir_item = std::ptr::null_mut(); unsafe { - let ret = ShellExecuteW( + desktop.ParseDisplayName( + HWND::default(), None, - windows::core::w!("open"), - windows::core::w!("explorer.exe"), - &HSTRING::from(format!("/select,{}", target).as_str()), + &HSTRING::from(dir), None, - SW_SHOWDEFAULT, - ); - if ret.0 as isize <= 32 { - log::error!( - "Unable to open target in explorer: {}", - std::io::Error::last_os_error() - ); - } + &mut dir_item, + std::ptr::null_mut(), + )?; } + + let mut file_item = std::ptr::null_mut(); + unsafe { + desktop.ParseDisplayName( + HWND::default(), + None, + &HSTRING::from(target), + None, + &mut file_item, + std::ptr::null_mut(), + )?; + } + + let highlight = [file_item as *const _]; + unsafe { SHOpenFolderAndSelectItems(dir_item as _, Some(&highlight), 0) }.or_else(|err| { + if err.code().0 == ERROR_FILE_NOT_FOUND.0 as i32 { + // On some systems, the above call mysteriously fails with "file not + // found" even though the file is there. In these cases, ShellExecute() + // seems to work as a fallback (although it won't select the file). + open_target(dir).context("Opening target parent folder") + } else { + Err(anyhow::anyhow!("Can not open target path: {}", err)) + } + }) } fn file_open_dialog( @@ -777,6 +810,12 @@ fn file_open_dialog( unsafe { folder_dialog.SetOptions(dialog_options)?; + + if let Some(prompt) = options.prompt { + let prompt: &str = &prompt; + folder_dialog.SetOkButtonLabel(&HSTRING::from(prompt))?; + } + if folder_dialog.Show(window).is_err() { // User cancelled return Ok(None); @@ -799,17 +838,26 @@ fn file_open_dialog( Ok(Some(paths)) } -fn file_save_dialog(directory: PathBuf, window: Option) -> Result> { +fn file_save_dialog( + directory: PathBuf, + suggested_name: Option, + window: Option, +) -> Result> { let dialog: IFileSaveDialog = unsafe { CoCreateInstance(&FileSaveDialog, None, CLSCTX_ALL)? }; - if !directory.to_string_lossy().is_empty() { - if let Some(full_path) = directory.canonicalize().log_err() { - let full_path = SanitizedPath::from(full_path); - let full_path_string = full_path.to_string(); - let path_item: IShellItem = - unsafe { SHCreateItemFromParsingName(&HSTRING::from(full_path_string), None)? 
}; - unsafe { dialog.SetFolder(&path_item).log_err() }; - } + if !directory.to_string_lossy().is_empty() + && let Some(full_path) = directory.canonicalize().log_err() + { + let full_path = SanitizedPath::from(full_path); + let full_path_string = full_path.to_string(); + let path_item: IShellItem = + unsafe { SHCreateItemFromParsingName(&HSTRING::from(full_path_string), None)? }; + unsafe { dialog.SetFolder(&path_item).log_err() }; } + + if let Some(suggested_name) = suggested_name { + unsafe { dialog.SetFileName(&HSTRING::from(suggested_name)).log_err() }; + } + unsafe { dialog.SetFileTypes(&[Common::COMDLG_FILTERSPEC { pszName: windows::core::w!("All files"), diff --git a/crates/gpui/src/platform/windows/shaders.hlsl b/crates/gpui/src/platform/windows/shaders.hlsl index 25830e4b6c3183772aaa3c0a73bc1cef33b908a9..6fabe859e3fe6de58c438642455964e135258860 100644 --- a/crates/gpui/src/platform/windows/shaders.hlsl +++ b/crates/gpui/src/platform/windows/shaders.hlsl @@ -914,7 +914,7 @@ float4 path_rasterization_fragment(PathFragmentInput input): SV_Target { float2 dx = ddx(input.st_position); float2 dy = ddy(input.st_position); PathRasterizationSprite sprite = path_rasterization_sprites[input.vertex_id]; - + Background background = sprite.color; Bounds bounds = sprite.bounds; @@ -1021,13 +1021,18 @@ UnderlineVertexOutput underline_vertex(uint vertex_id: SV_VertexID, uint underli } float4 underline_fragment(UnderlineFragmentInput input): SV_Target { + const float WAVE_FREQUENCY = 2.0; + const float WAVE_HEIGHT_RATIO = 0.8; + Underline underline = underlines[input.underline_id]; if (underline.wavy) { float half_thickness = underline.thickness * 0.5; float2 origin = underline.bounds.origin; + float2 st = ((input.position.xy - origin) / underline.bounds.size.y) - float2(0., 0.5); - float frequency = (M_PI_F * (3. * underline.thickness)) / 8.; - float amplitude = 1. / (2. * underline.thickness); + float frequency = (M_PI_F * WAVE_FREQUENCY * underline.thickness) / underline.bounds.size.y; + float amplitude = (underline.thickness * WAVE_HEIGHT_RATIO) / underline.bounds.size.y; + float sine = sin(st.x * frequency) * amplitude; float dSine = cos(st.x * frequency) * amplitude * frequency; float distance = (st.y - sine) / sqrt(1. + dSine * dSine); diff --git a/crates/gpui/src/platform/windows/util.rs b/crates/gpui/src/platform/windows/util.rs index 5fb8febe3b9b0b71e5d63ff4070f81b95c3dabf7..af71dfe4a1f89246ccfceb6fc536acd199477e88 100644 --- a/crates/gpui/src/platform/windows/util.rs +++ b/crates/gpui/src/platform/windows/util.rs @@ -1,14 +1,18 @@ use std::sync::OnceLock; use ::util::ResultExt; +use anyhow::Context; use windows::{ UI::{ Color, ViewManagement::{UIColorType, UISettings}, }, Wdk::System::SystemServices::RtlGetVersion, - Win32::{Foundation::*, Graphics::Dwm::*, UI::WindowsAndMessaging::*}, - core::{BOOL, HSTRING}, + Win32::{ + Foundation::*, Graphics::Dwm::*, System::LibraryLoader::LoadLibraryA, + UI::WindowsAndMessaging::*, + }, + core::{BOOL, HSTRING, PCSTR}, }; use crate::*; @@ -197,3 +201,19 @@ pub(crate) fn show_error(title: &str, content: String) { ) }; } + +pub(crate) fn with_dll_library(dll_name: PCSTR, f: F) -> Result +where + F: FnOnce(HMODULE) -> Result, +{ + let library = unsafe { + LoadLibraryA(dll_name).with_context(|| format!("Loading dll: {}", dll_name.display()))? 
+ }; + let result = f(library); + unsafe { + FreeLibrary(library) + .with_context(|| format!("Freeing dll: {}", dll_name.display())) + .log_err(); + } + result +} diff --git a/crates/gpui/src/platform/windows/vsync.rs b/crates/gpui/src/platform/windows/vsync.rs new file mode 100644 index 0000000000000000000000000000000000000000..6d09b0960f11cefce007413066620b3b332e1ae9 --- /dev/null +++ b/crates/gpui/src/platform/windows/vsync.rs @@ -0,0 +1,174 @@ +use std::{ + sync::LazyLock, + time::{Duration, Instant}, +}; + +use anyhow::{Context, Result}; +use util::ResultExt; +use windows::{ + Win32::{ + Foundation::{HANDLE, HWND}, + Graphics::{ + DirectComposition::{ + COMPOSITION_FRAME_ID_COMPLETED, COMPOSITION_FRAME_ID_TYPE, COMPOSITION_FRAME_STATS, + COMPOSITION_TARGET_ID, + }, + Dwm::{DWM_TIMING_INFO, DwmFlush, DwmGetCompositionTimingInfo}, + }, + System::{ + LibraryLoader::{GetModuleHandleA, GetProcAddress}, + Performance::QueryPerformanceFrequency, + Threading::INFINITE, + }, + }, + core::{HRESULT, s}, }; + +static QPC_TICKS_PER_SECOND: LazyLock<u64> = LazyLock::new(|| { + let mut frequency = 0; + // On systems that run Windows XP or later, the function will always succeed and + // will thus never return zero. + unsafe { QueryPerformanceFrequency(&mut frequency).unwrap() }; + frequency as u64 +}); + +const VSYNC_INTERVAL_THRESHOLD: Duration = Duration::from_millis(1); +const DEFAULT_VSYNC_INTERVAL: Duration = Duration::from_micros(16_666); // ~60Hz + +// Here we load the DirectComposition functions dynamically; otherwise the app +// would refuse to start on Windows systems that do not support DirectComposition. +type DCompositionGetFrameId = + unsafe extern "system" fn(frameidtype: COMPOSITION_FRAME_ID_TYPE, frameid: *mut u64) -> HRESULT; +type DCompositionGetStatistics = unsafe extern "system" fn( + frameid: u64, + framestats: *mut COMPOSITION_FRAME_STATS, + targetidcount: u32, + targetids: *mut COMPOSITION_TARGET_ID, + actualtargetidcount: *mut u32, +) -> HRESULT; +type DCompositionWaitForCompositorClock = + unsafe extern "system" fn(count: u32, handles: *const HANDLE, timeoutinms: u32) -> u32; + +pub(crate) struct VSyncProvider { + interval: Duration, + f: Box<dyn Fn() -> bool>, +} + +impl VSyncProvider { + pub(crate) fn new() -> Self { + if let Some((get_frame_id, get_statistics, wait_for_comp_clock)) = + initialize_direct_composition() + .context("Retrieving DirectComposition functions") + .log_with_level(log::Level::Warn) + { + let interval = get_dwm_interval_from_direct_composition(get_frame_id, get_statistics) + .context("Failed to get DWM interval from DirectComposition") + .log_err() + .unwrap_or(DEFAULT_VSYNC_INTERVAL); + log::info!( + "DirectComposition is supported for VSync, interval: {:?}", + interval + ); + let f = Box::new(move || unsafe { + wait_for_comp_clock(0, std::ptr::null(), INFINITE) == 0 + }); + Self { interval, f } + } else { + let interval = get_dwm_interval() + .context("Failed to get DWM interval") + .log_err() + .unwrap_or(DEFAULT_VSYNC_INTERVAL); + log::info!( + "DirectComposition is not supported for VSync, falling back to DWM, interval: {:?}", + interval + ); + let f = Box::new(|| unsafe { DwmFlush().is_ok() }); + Self { interval, f } + } + } + + pub(crate) fn wait_for_vsync(&self) { + let vsync_start = Instant::now(); + let wait_succeeded = (self.f)(); + let elapsed = vsync_start.elapsed(); + // DwmFlush and DCompositionWaitForCompositorClock return very early + // instead of waiting until vblank when the monitor goes to sleep or is + // unplugged (nothing to present due to
desktop occlusion). We use 1ms as + // a threshold for the duration of the wait functions and fall back to + // Sleep() if they return before that. This could happen during normal + // operation for the first call after the vsync thread becomes non-idle, + // but it shouldn't happen often. + if !wait_succeeded || elapsed < VSYNC_INTERVAL_THRESHOLD { + log::trace!("VSyncProvider::wait_for_vsync() took less time than expected"); + std::thread::sleep(self.interval); + } + } +} + +fn initialize_direct_composition() -> Result<( + DCompositionGetFrameId, + DCompositionGetStatistics, + DCompositionWaitForCompositorClock, +)> { + unsafe { + // Load DLL at runtime since older Windows versions don't have dcomp. + let hmodule = GetModuleHandleA(s!("dcomp.dll")).context("Loading dcomp.dll")?; + let get_frame_id_addr = GetProcAddress(hmodule, s!("DCompositionGetFrameId")) + .context("Function DCompositionGetFrameId not found")?; + let get_statistics_addr = GetProcAddress(hmodule, s!("DCompositionGetStatistics")) + .context("Function DCompositionGetStatistics not found")?; + let wait_for_compositor_clock_addr = + GetProcAddress(hmodule, s!("DCompositionWaitForCompositorClock")) + .context("Function DCompositionWaitForCompositorClock not found")?; + let get_frame_id: DCompositionGetFrameId = std::mem::transmute(get_frame_id_addr); + let get_statistics: DCompositionGetStatistics = std::mem::transmute(get_statistics_addr); + let wait_for_compositor_clock: DCompositionWaitForCompositorClock = + std::mem::transmute(wait_for_compositor_clock_addr); + Ok((get_frame_id, get_statistics, wait_for_compositor_clock)) + } +} + +fn get_dwm_interval_from_direct_composition( + get_frame_id: DCompositionGetFrameId, + get_statistics: DCompositionGetStatistics, +) -> Result<Duration> { + let mut frame_id = 0; + unsafe { get_frame_id(COMPOSITION_FRAME_ID_COMPLETED, &mut frame_id) }.ok()?; + let mut stats = COMPOSITION_FRAME_STATS::default(); + unsafe { + get_statistics( + frame_id, + &mut stats, + 0, + std::ptr::null_mut(), + std::ptr::null_mut(), + ) + } + .ok()?; + Ok(retrieve_duration(stats.framePeriod, *QPC_TICKS_PER_SECOND)) +} + +fn get_dwm_interval() -> Result<Duration> { + let mut timing_info = DWM_TIMING_INFO { + cbSize: std::mem::size_of::<DWM_TIMING_INFO>() as u32, + ..Default::default() + }; + unsafe { DwmGetCompositionTimingInfo(HWND::default(), &mut timing_info) }?; + let interval = retrieve_duration(timing_info.qpcRefreshPeriod, *QPC_TICKS_PER_SECOND); + // Check for interval values that are impossibly low. A 29 microsecond + // interval was seen (from a qpcRefreshPeriod of 60).
+ if interval < VSYNC_INTERVAL_THRESHOLD { + Ok(retrieve_duration( + timing_info.rateRefresh.uiDenominator as u64, + timing_info.rateRefresh.uiNumerator as u64, + )) + } else { + Ok(interval) + } +} + +#[inline] +fn retrieve_duration(counts: u64, ticks_per_second: u64) -> Duration { + let ticks_per_microsecond = ticks_per_second / 1_000_000; + Duration::from_micros(counts / ticks_per_microsecond) +} diff --git a/crates/gpui/src/platform/windows/window.rs b/crates/gpui/src/platform/windows/window.rs index 32a6da23915d1e2bdf61c662e364119e9c6a8c64..99e50733714ba3e280508585f479a90e7edd76d7 100644 --- a/crates/gpui/src/platform/windows/window.rs +++ b/crates/gpui/src/platform/windows/window.rs @@ -592,10 +592,7 @@ impl PlatformWindow for WindowsWindow { ) -> Option> { let (done_tx, done_rx) = oneshot::channel(); let msg = msg.to_string(); - let detail_string = match detail { - Some(info) => Some(info.to_string()), - None => None, - }; + let detail_string = detail.map(|detail| detail.to_string()); let handle = self.0.hwnd; let answers = answers.to_vec(); self.0 diff --git a/crates/gpui/src/platform/windows/wrapper.rs b/crates/gpui/src/platform/windows/wrapper.rs index 6015dffdab299754d9469684ead08a9d5c95d4c6..60bbc433cabc1c6b7c72c9912196ad58f4093da7 100644 --- a/crates/gpui/src/platform/windows/wrapper.rs +++ b/crates/gpui/src/platform/windows/wrapper.rs @@ -1,23 +1,23 @@ use std::ops::Deref; -use windows::Win32::{Foundation::HANDLE, UI::WindowsAndMessaging::HCURSOR}; +use windows::Win32::{Foundation::HWND, UI::WindowsAndMessaging::HCURSOR}; #[derive(Debug, Clone, Copy)] -pub(crate) struct SafeHandle { - raw: HANDLE, +pub(crate) struct SafeCursor { + raw: HCURSOR, } -unsafe impl Send for SafeHandle {} -unsafe impl Sync for SafeHandle {} +unsafe impl Send for SafeCursor {} +unsafe impl Sync for SafeCursor {} -impl From for SafeHandle { - fn from(value: HANDLE) -> Self { - SafeHandle { raw: value } +impl From for SafeCursor { + fn from(value: HCURSOR) -> Self { + SafeCursor { raw: value } } } -impl Deref for SafeHandle { - type Target = HANDLE; +impl Deref for SafeCursor { + type Target = HCURSOR; fn deref(&self) -> &Self::Target { &self.raw @@ -25,21 +25,27 @@ impl Deref for SafeHandle { } #[derive(Debug, Clone, Copy)] -pub(crate) struct SafeCursor { - raw: HCURSOR, +pub(crate) struct SafeHwnd { + raw: HWND, } -unsafe impl Send for SafeCursor {} -unsafe impl Sync for SafeCursor {} +impl SafeHwnd { + pub(crate) fn as_raw(&self) -> HWND { + self.raw + } +} -impl From for SafeCursor { - fn from(value: HCURSOR) -> Self { - SafeCursor { raw: value } +unsafe impl Send for SafeHwnd {} +unsafe impl Sync for SafeHwnd {} + +impl From for SafeHwnd { + fn from(value: HWND) -> Self { + SafeHwnd { raw: value } } } -impl Deref for SafeCursor { - type Target = HCURSOR; +impl Deref for SafeHwnd { + type Target = HWND; fn deref(&self) -> &Self::Target { &self.raw diff --git a/crates/gpui/src/scene.rs b/crates/gpui/src/scene.rs index c527dfe750beb2d19ed6750e48f71208ec2720bf..758d06e5972e36b15fe0b8ce9d7d3193f8d3844d 100644 --- a/crates/gpui/src/scene.rs +++ b/crates/gpui/src/scene.rs @@ -476,7 +476,7 @@ pub(crate) struct Underline { pub content_mask: ContentMask, pub color: Hsla, pub thickness: ScaledPixels, - pub wavy: bool, + pub wavy: u32, } impl From for Primitive { diff --git a/crates/gpui/src/shared_string.rs b/crates/gpui/src/shared_string.rs index c325f98cd243121264875d7a9452308772d49e86..350184d350aec8c5995fe7d2f0856f1fe1cfea0f 100644 --- a/crates/gpui/src/shared_string.rs +++ 
b/crates/gpui/src/shared_string.rs @@ -23,6 +23,11 @@ impl SharedString { pub fn new(str: impl Into>) -> Self { SharedString(ArcCow::Owned(str.into())) } + + /// Get a &str from the underlying string. + pub fn as_str(&self) -> &str { + &self.0 + } } impl JsonSchema for SharedString { @@ -103,7 +108,7 @@ impl From for Arc { fn from(val: SharedString) -> Self { match val.0 { ArcCow::Borrowed(borrowed) => Arc::from(borrowed), - ArcCow::Owned(owned) => owned.clone(), + ArcCow::Owned(owned) => owned, } } } diff --git a/crates/gpui/src/style.rs b/crates/gpui/src/style.rs index 560de7b924cb8ed079a219425eda3762d4c13af0..5b69ce7fa6eb06affc2f77c0d1bdfbe4165c206a 100644 --- a/crates/gpui/src/style.rs +++ b/crates/gpui/src/style.rs @@ -7,7 +7,7 @@ use std::{ use crate::{ AbsoluteLength, App, Background, BackgroundTag, BorderStyle, Bounds, ContentMask, Corners, CornersRefinement, CursorStyle, DefiniteLength, DevicePixels, Edges, EdgesRefinement, Font, - FontFallbacks, FontFeatures, FontStyle, FontWeight, Hsla, Length, Pixels, Point, + FontFallbacks, FontFeatures, FontStyle, FontWeight, GridLocation, Hsla, Length, Pixels, Point, PointRefinement, Rgba, SharedString, Size, SizeRefinement, Styled, TextRun, Window, black, phi, point, quad, rems, size, }; @@ -260,6 +260,17 @@ pub struct Style { /// The opacity of this element pub opacity: Option, + /// The grid columns of this element + /// Equivalent to the Tailwind `grid-cols-` + pub grid_cols: Option, + + /// The row span of this element + /// Equivalent to the Tailwind `grid-rows-` + pub grid_rows: Option, + + /// The grid location of this element + pub grid_location: Option, + /// Whether to draw a red debugging outline around this element #[cfg(debug_assertions)] pub debug: bool, @@ -275,6 +286,13 @@ impl Styled for StyleRefinement { } } +impl StyleRefinement { + /// The grid location of this element + pub fn grid_location_mut(&mut self) -> &mut GridLocation { + self.grid_location.get_or_insert_default() + } +} + /// The value of the visibility property, similar to the CSS property `visibility` #[derive(Default, Clone, Copy, Debug, Eq, PartialEq, Serialize, Deserialize, JsonSchema)] pub enum Visibility { @@ -555,7 +573,7 @@ impl Style { if self .border_color - .map_or(false, |color| !color.is_transparent()) + .is_some_and(|color| !color.is_transparent()) { min.x += self.border_widths.left.to_pixels(rem_size); max.x -= self.border_widths.right.to_pixels(rem_size); @@ -615,7 +633,7 @@ impl Style { window.paint_shadows(bounds, corner_radii, &self.box_shadow); let background_color = self.background.as_ref().and_then(Fill::color); - if background_color.map_or(false, |color| !color.is_transparent()) { + if background_color.is_some_and(|color| !color.is_transparent()) { let mut border_color = match background_color { Some(color) => match color.tag { BackgroundTag::Solid => color.solid, @@ -711,7 +729,7 @@ impl Style { fn is_border_visible(&self) -> bool { self.border_color - .map_or(false, |color| !color.is_transparent()) + .is_some_and(|color| !color.is_transparent()) && self.border_widths.any(|length| !length.is_zero()) } } @@ -757,6 +775,9 @@ impl Default for Style { text: TextStyleRefinement::default(), mouse_cursor: None, opacity: None, + grid_rows: None, + grid_cols: None, + grid_location: None, #[cfg(debug_assertions)] debug: false, diff --git a/crates/gpui/src/styled.rs b/crates/gpui/src/styled.rs index b689f3268729115d61b3d02897b886113f8e9b34..c714cac14fe894410a05d40c4c5b30d6fbf61e2d 100644 --- a/crates/gpui/src/styled.rs +++ 
b/crates/gpui/src/styled.rs @@ -1,8 +1,8 @@ use crate::{ self as gpui, AbsoluteLength, AlignContent, AlignItems, BorderStyle, CursorStyle, - DefiniteLength, Display, Fill, FlexDirection, FlexWrap, Font, FontStyle, FontWeight, Hsla, - JustifyContent, Length, SharedString, StrikethroughStyle, StyleRefinement, TextAlign, - TextOverflow, TextStyleRefinement, UnderlineStyle, WhiteSpace, px, relative, rems, + DefiniteLength, Display, Fill, FlexDirection, FlexWrap, Font, FontStyle, FontWeight, + GridPlacement, Hsla, JustifyContent, Length, SharedString, StrikethroughStyle, StyleRefinement, + TextAlign, TextOverflow, TextStyleRefinement, UnderlineStyle, WhiteSpace, px, relative, rems, }; pub use gpui_macros::{ border_style_methods, box_shadow_style_methods, cursor_style_methods, margin_style_methods, @@ -46,6 +46,13 @@ pub trait Styled: Sized { self } + /// Sets the display type of the element to `grid`. + /// [Docs](https://tailwindcss.com/docs/display) + fn grid(mut self) -> Self { + self.style().display = Some(Display::Grid); + self + } + /// Sets the whitespace of the element to `normal`. /// [Docs](https://tailwindcss.com/docs/whitespace#normal) fn whitespace_normal(mut self) -> Self { @@ -640,6 +647,102 @@ pub trait Styled: Sized { self } + /// Sets the grid columns of this element. + fn grid_cols(mut self, cols: u16) -> Self { + self.style().grid_cols = Some(cols); + self + } + + /// Sets the grid rows of this element. + fn grid_rows(mut self, rows: u16) -> Self { + self.style().grid_rows = Some(rows); + self + } + + /// Sets the column start of this element. + fn col_start(mut self, start: i16) -> Self { + let grid_location = self.style().grid_location_mut(); + grid_location.column.start = GridPlacement::Line(start); + self + } + + /// Sets the column start of this element to auto. + fn col_start_auto(mut self) -> Self { + let grid_location = self.style().grid_location_mut(); + grid_location.column.start = GridPlacement::Auto; + self + } + + /// Sets the column end of this element. + fn col_end(mut self, end: i16) -> Self { + let grid_location = self.style().grid_location_mut(); + grid_location.column.end = GridPlacement::Line(end); + self + } + + /// Sets the column end of this element to auto. + fn col_end_auto(mut self) -> Self { + let grid_location = self.style().grid_location_mut(); + grid_location.column.end = GridPlacement::Auto; + self + } + + /// Sets the column span of this element. + fn col_span(mut self, span: u16) -> Self { + let grid_location = self.style().grid_location_mut(); + grid_location.column = GridPlacement::Span(span)..GridPlacement::Span(span); + self + } + + /// Sets the row span of this element. + fn col_span_full(mut self) -> Self { + let grid_location = self.style().grid_location_mut(); + grid_location.column = GridPlacement::Line(1)..GridPlacement::Line(-1); + self + } + + /// Sets the row start of this element. + fn row_start(mut self, start: i16) -> Self { + let grid_location = self.style().grid_location_mut(); + grid_location.row.start = GridPlacement::Line(start); + self + } + + /// Sets the row start of this element to "auto" + fn row_start_auto(mut self) -> Self { + let grid_location = self.style().grid_location_mut(); + grid_location.row.start = GridPlacement::Auto; + self + } + + /// Sets the row end of this element. 
+ fn row_end(mut self, end: i16) -> Self { + let grid_location = self.style().grid_location_mut(); + grid_location.row.end = GridPlacement::Line(end); + self + } + + /// Sets the row end of this element to "auto" + fn row_end_auto(mut self) -> Self { + let grid_location = self.style().grid_location_mut(); + grid_location.row.end = GridPlacement::Auto; + self + } + + /// Sets the row span of this element. + fn row_span(mut self, span: u16) -> Self { + let grid_location = self.style().grid_location_mut(); + grid_location.row = GridPlacement::Span(span)..GridPlacement::Span(span); + self + } + + /// Sets the row span of this element. + fn row_span_full(mut self) -> Self { + let grid_location = self.style().grid_location_mut(); + grid_location.row = GridPlacement::Line(1)..GridPlacement::Line(-1); + self + } + /// Draws a debug border around this element. #[cfg(debug_assertions)] fn debug(mut self) -> Self { diff --git a/crates/gpui/src/subscription.rs b/crates/gpui/src/subscription.rs index a584f1a45f82094ce9b867bc5f43805c48f93ebe..bd869f8d32cdfc81917fc2287b7dc62fac7d727d 100644 --- a/crates/gpui/src/subscription.rs +++ b/crates/gpui/src/subscription.rs @@ -201,3 +201,9 @@ impl Drop for Subscription { } } } + +impl std::fmt::Debug for Subscription { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("Subscription").finish() + } +} diff --git a/crates/gpui/src/tab_stop.rs b/crates/gpui/src/tab_stop.rs index 7dde42efed8a138de3a29657683d95c60e27dda0..c4d2fda6e9a9c3e0adfb2d02cf5c372869d42751 100644 --- a/crates/gpui/src/tab_stop.rs +++ b/crates/gpui/src/tab_stop.rs @@ -45,27 +45,18 @@ impl TabHandles { }) .unwrap_or_default(); - if let Some(next_handle) = self.handles.get(next_ix) { - Some(next_handle.clone()) - } else { - None - } + self.handles.get(next_ix).cloned() } pub(crate) fn prev(&self, focused_id: Option<&FocusId>) -> Option { let ix = self.current_index(focused_id).unwrap_or_default(); - let prev_ix; - if ix == 0 { - prev_ix = self.handles.len().saturating_sub(1); + let prev_ix = if ix == 0 { + self.handles.len().saturating_sub(1) } else { - prev_ix = ix.saturating_sub(1); - } + ix.saturating_sub(1) + }; - if let Some(prev_handle) = self.handles.get(prev_ix) { - Some(prev_handle.clone()) - } else { - None - } + self.handles.get(prev_ix).cloned() } } @@ -90,7 +81,7 @@ mod tests { ]; for handle in focus_handles.iter() { - tab.insert(&handle); + tab.insert(handle); } assert_eq!( tab.handles diff --git a/crates/gpui/src/taffy.rs b/crates/gpui/src/taffy.rs index f7fa54256df20b38170ecb4d3e48c22913e44ae6..58386ad1f5031e1427baad05c4db075df1b2d761 100644 --- a/crates/gpui/src/taffy.rs +++ b/crates/gpui/src/taffy.rs @@ -3,7 +3,8 @@ use crate::{ }; use collections::{FxHashMap, FxHashSet}; use smallvec::SmallVec; -use std::fmt::Debug; +use stacksafe::{StackSafe, stacksafe}; +use std::{fmt::Debug, ops::Range}; use taffy::{ TaffyTree, TraversePartialTree as _, geometry::{Point as TaffyPoint, Rect as TaffyRect, Size as TaffySize}, @@ -11,8 +12,15 @@ use taffy::{ tree::NodeId, }; -type NodeMeasureFn = Box< - dyn FnMut(Size>, Size, &mut Window, &mut App) -> Size, +type NodeMeasureFn = StackSafe< + Box< + dyn FnMut( + Size>, + Size, + &mut Window, + &mut App, + ) -> Size, + >, >; struct NodeContext { @@ -50,23 +58,21 @@ impl TaffyLayoutEngine { children: &[LayoutId], ) -> LayoutId { let taffy_style = style.to_taffy(rem_size); - let layout_id = if children.is_empty() { + + if children.is_empty() { self.taffy .new_leaf(taffy_style) .expect(EXPECT_MESSAGE) .into() } 
else { - let parent_id = self - .taffy + self.taffy // This is safe because LayoutId is repr(transparent) to taffy::tree::NodeId. .new_with_children(taffy_style, unsafe { std::mem::transmute::<&[LayoutId], &[taffy::NodeId]>(children) }) .expect(EXPECT_MESSAGE) - .into(); - parent_id - }; - layout_id + .into() + } } pub fn request_measured_layout( @@ -83,17 +89,15 @@ impl TaffyLayoutEngine { ) -> LayoutId { let taffy_style = style.to_taffy(rem_size); - let layout_id = self - .taffy + self.taffy .new_leaf_with_context( taffy_style, NodeContext { - measure: Box::new(measure), + measure: StackSafe::new(Box::new(measure)), }, ) .expect(EXPECT_MESSAGE) - .into(); - layout_id + .into() } // Used to understand performance @@ -143,6 +147,7 @@ impl TaffyLayoutEngine { Ok(edges) } + #[stacksafe] pub fn compute_layout( &mut self, id: LayoutId, @@ -159,7 +164,6 @@ impl TaffyLayoutEngine { // for (a, b) in self.get_edges(id)? { // println!("N{} --> N{}", u64::from(a), u64::from(b)); // } - // println!(""); // if !self.computed_layouts.insert(id) { @@ -251,6 +255,25 @@ trait ToTaffy { impl ToTaffy for Style { fn to_taffy(&self, rem_size: Pixels) -> taffy::style::Style { + use taffy::style_helpers::{fr, length, minmax, repeat}; + + fn to_grid_line( + placement: &Range, + ) -> taffy::Line { + taffy::Line { + start: placement.start.into(), + end: placement.end.into(), + } + } + + fn to_grid_repeat( + unit: &Option, + ) -> Vec> { + // grid-template-columns: repeat(, minmax(0, 1fr)); + unit.map(|count| vec![repeat(count, vec![minmax(length(0.0), fr(1.0))])]) + .unwrap_or_default() + } + taffy::style::Style { display: self.display.into(), overflow: self.overflow.into(), @@ -274,7 +297,19 @@ impl ToTaffy for Style { flex_basis: self.flex_basis.to_taffy(rem_size), flex_grow: self.flex_grow, flex_shrink: self.flex_shrink, - ..Default::default() // Ignore grid properties for now + grid_template_rows: to_grid_repeat(&self.grid_rows), + grid_template_columns: to_grid_repeat(&self.grid_cols), + grid_row: self + .grid_location + .as_ref() + .map(|location| to_grid_line(&location.row)) + .unwrap_or_default(), + grid_column: self + .grid_location + .as_ref() + .map(|location| to_grid_line(&location.column)) + .unwrap_or_default(), + ..Default::default() } } } diff --git a/crates/gpui/src/text_system.rs b/crates/gpui/src/text_system.rs index ed1307c6cdb797d85b5a48b0d5ef033f1ebddc36..53991089da94c58d0035bff0d607ad3ab57a69bd 100644 --- a/crates/gpui/src/text_system.rs +++ b/crates/gpui/src/text_system.rs @@ -65,7 +65,7 @@ impl TextSystem { font_runs_pool: Mutex::default(), fallback_font_stack: smallvec![ // TODO: Remove this when Linux have implemented setting fallbacks. - font("Zed Plex Mono"), + font(".ZedMono"), font("Helvetica"), font("Segoe UI"), // Windows font("Cantarell"), // Gnome @@ -96,7 +96,7 @@ impl TextSystem { } /// Get the FontId for the configure font family and style. 
- pub fn font_id(&self, font: &Font) -> Result { + fn font_id(&self, font: &Font) -> Result { fn clone_font_id_result(font_id: &Result) -> Result { match font_id { Ok(font_id) => Ok(*font_id), @@ -366,15 +366,14 @@ impl WindowTextSystem { let mut decoration_runs = SmallVec::<[DecorationRun; 32]>::new(); for run in runs { - if let Some(last_run) = decoration_runs.last_mut() { - if last_run.color == run.color - && last_run.underline == run.underline - && last_run.strikethrough == run.strikethrough - && last_run.background_color == run.background_color - { - last_run.len += run.len as u32; - continue; - } + if let Some(last_run) = decoration_runs.last_mut() + && last_run.color == run.color + && last_run.underline == run.underline + && last_run.strikethrough == run.strikethrough + && last_run.background_color == run.background_color + { + last_run.len += run.len as u32; + continue; } decoration_runs.push(DecorationRun { len: run.len as u32, @@ -436,7 +435,7 @@ impl WindowTextSystem { }); } - if decoration_runs.last().map_or(false, |last_run| { + if decoration_runs.last().is_some_and(|last_run| { last_run.color == run.color && last_run.underline == run.underline && last_run.strikethrough == run.strikethrough @@ -492,14 +491,14 @@ impl WindowTextSystem { let mut split_lines = text.split('\n'); let mut processed = false; - if let Some(first_line) = split_lines.next() { - if let Some(second_line) = split_lines.next() { - processed = true; - process_line(first_line.to_string().into()); - process_line(second_line.to_string().into()); - for line_text in split_lines { - process_line(line_text.to_string().into()); - } + if let Some(first_line) = split_lines.next() + && let Some(second_line) = split_lines.next() + { + processed = true; + process_line(first_line.to_string().into()); + process_line(second_line.to_string().into()); + for line_text in split_lines { + process_line(line_text.to_string().into()); } } @@ -534,11 +533,11 @@ impl WindowTextSystem { let mut font_runs = self.font_runs_pool.lock().pop().unwrap_or_default(); for run in runs.iter() { let font_id = self.resolve_font(&run.font); - if let Some(last_run) = font_runs.last_mut() { - if last_run.font_id == font_id { - last_run.len += run.len; - continue; - } + if let Some(last_run) = font_runs.last_mut() + && last_run.font_id == font_id + { + last_run.len += run.len; + continue; } font_runs.push(FontRun { len: run.len, @@ -844,3 +843,16 @@ impl FontMetrics { (self.bounding_box / self.units_per_em as f32 * font_size.0).map(px) } } + +#[allow(unused)] +pub(crate) fn font_name_with_fallbacks<'a>(name: &'a str, system: &'a str) -> &'a str { + // Note: the "Zed Plex" fonts were deprecated as we are not allowed to use "Plex" + // in a derived font name. They are essentially indistinguishable from IBM Plex/Lilex, + // and so retained here for backward compatibility. 
+ match name { + ".SystemUIFont" => system, + ".ZedSans" | "Zed Plex Sans" => "IBM Plex Sans", + ".ZedMono" | "Zed Plex Mono" => "Lilex", + _ => name, + } +} diff --git a/crates/gpui/src/text_system/line.rs b/crates/gpui/src/text_system/line.rs index 3813393d81deaff4ed9adb1d96e204b75953233f..8d559f981581858990fa545b8e0ba65bccdf80a8 100644 --- a/crates/gpui/src/text_system/line.rs +++ b/crates/gpui/src/text_system/line.rs @@ -292,10 +292,10 @@ fn paint_line( } if let Some(style_run) = style_run { - if let Some((_, underline_style)) = &mut current_underline { - if style_run.underline.as_ref() != Some(underline_style) { - finished_underline = current_underline.take(); - } + if let Some((_, underline_style)) = &mut current_underline + && style_run.underline.as_ref() != Some(underline_style) + { + finished_underline = current_underline.take(); } if let Some(run_underline) = style_run.underline.as_ref() { current_underline.get_or_insert(( @@ -310,10 +310,10 @@ fn paint_line( }, )); } - if let Some((_, strikethrough_style)) = &mut current_strikethrough { - if style_run.strikethrough.as_ref() != Some(strikethrough_style) { - finished_strikethrough = current_strikethrough.take(); - } + if let Some((_, strikethrough_style)) = &mut current_strikethrough + && style_run.strikethrough.as_ref() != Some(strikethrough_style) + { + finished_strikethrough = current_strikethrough.take(); } if let Some(run_strikethrough) = style_run.strikethrough.as_ref() { current_strikethrough.get_or_insert(( @@ -509,10 +509,10 @@ fn paint_line_background( } if let Some(style_run) = style_run { - if let Some((_, background_color)) = &mut current_background { - if style_run.background_color.as_ref() != Some(background_color) { - finished_background = current_background.take(); - } + if let Some((_, background_color)) = &mut current_background + && style_run.background_color.as_ref() != Some(background_color) + { + finished_background = current_background.take(); } if let Some(run_background) = style_run.background_color { current_background.get_or_insert(( diff --git a/crates/gpui/src/text_system/line_layout.rs b/crates/gpui/src/text_system/line_layout.rs index 9c2dd7f0871e5b67bd15d3a419c1c03496e2afaa..43694702a82566b8f84199dcfc4ff996da93588e 100644 --- a/crates/gpui/src/text_system/line_layout.rs +++ b/crates/gpui/src/text_system/line_layout.rs @@ -185,10 +185,10 @@ impl LineLayout { if width > wrap_width && boundary > last_boundary { // When used line_clamp, we should limit the number of lines. 
- if let Some(max_lines) = max_lines { - if boundaries.len() >= max_lines - 1 { - break; - } + if let Some(max_lines) = max_lines + && boundaries.len() >= max_lines - 1 + { + break; } if let Some(last_candidate_ix) = last_candidate_ix.take() { diff --git a/crates/gpui/src/text_system/line_wrapper.rs b/crates/gpui/src/text_system/line_wrapper.rs index 5de26511d333bb75515ba3f7b0c9c22e39f5be3f..93ec6c854c31d3f312006f61d6994a4eee4b88ef 100644 --- a/crates/gpui/src/text_system/line_wrapper.rs +++ b/crates/gpui/src/text_system/line_wrapper.rs @@ -44,7 +44,7 @@ impl LineWrapper { let mut prev_c = '\0'; let mut index = 0; let mut candidates = fragments - .into_iter() + .iter() .flat_map(move |fragment| fragment.wrap_boundary_candidates()) .peekable(); iter::from_fn(move || { @@ -327,7 +327,7 @@ mod tests { fn build_wrapper() -> LineWrapper { let dispatcher = TestDispatcher::new(StdRng::seed_from_u64(0)); let cx = TestAppContext::build(dispatcher, None); - let id = cx.text_system().font_id(&font("Zed Plex Mono")).unwrap(); + let id = cx.text_system().resolve_font(&font(".ZedMono")); LineWrapper::new(id, px(16.), cx.text_system().platform_text_system.clone()) } diff --git a/crates/gpui/src/util.rs b/crates/gpui/src/util.rs index 5e92335fdc86e331d3a469c4384043fd9799b00a..3d7fa06e6ca013ae38b1c63d1bfd624d46cdf4f1 100644 --- a/crates/gpui/src/util.rs +++ b/crates/gpui/src/util.rs @@ -1,13 +1,11 @@ -use std::sync::atomic::AtomicUsize; -use std::sync::atomic::Ordering::SeqCst; -#[cfg(any(test, feature = "test-support"))] -use std::time::Duration; - -#[cfg(any(test, feature = "test-support"))] -use futures::Future; - -#[cfg(any(test, feature = "test-support"))] -use smol::future::FutureExt; +use crate::{BackgroundExecutor, Task}; +use std::{ + future::Future, + pin::Pin, + sync::atomic::{AtomicUsize, Ordering::SeqCst}, + task, + time::Duration, +}; pub use util::*; @@ -60,18 +58,63 @@ pub trait FluentBuilder { where Self: Sized, { - self.map(|this| { - if let Some(_) = option { - this - } else { - then(this) - } - }) + self.map(|this| if option.is_some() { this } else { then(this) }) + } +} + +/// Extensions for Future types that provide additional combinators and utilities. +pub trait FutureExt { + /// Requires a Future to complete before the specified duration has elapsed. + /// Similar to tokio::timeout. 
+ fn with_timeout(self, timeout: Duration, executor: &BackgroundExecutor) -> WithTimeout + where + Self: Sized; +} + +impl FutureExt for T { + fn with_timeout(self, timeout: Duration, executor: &BackgroundExecutor) -> WithTimeout + where + Self: Sized, + { + WithTimeout { + future: self, + timer: executor.timer(timeout), + } + } +} + +pub struct WithTimeout { + future: T, + timer: Task<()>, +} + +#[derive(Debug, thiserror::Error)] +#[error("Timed out before future resolved")] +/// Error returned by with_timeout when the timeout duration elapsed before the future resolved +pub struct Timeout; + +impl Future for WithTimeout { + type Output = Result; + + fn poll(self: Pin<&mut Self>, cx: &mut task::Context) -> task::Poll { + // SAFETY: the fields of Timeout are private and we never move the future ourselves + // And its already pinned since we are being polled (all futures need to be pinned to be polled) + let this = unsafe { self.get_unchecked_mut() }; + let future = unsafe { Pin::new_unchecked(&mut this.future) }; + let timer = unsafe { Pin::new_unchecked(&mut this.timer) }; + + if let task::Poll::Ready(output) = future.poll(cx) { + task::Poll::Ready(Ok(output)) + } else if timer.poll(cx).is_ready() { + task::Poll::Ready(Err(Timeout)) + } else { + task::Poll::Pending + } } } #[cfg(any(test, feature = "test-support"))] -pub async fn timeout(timeout: Duration, f: F) -> Result +pub async fn smol_timeout(timeout: Duration, f: F) -> Result where F: Future, { @@ -80,7 +123,7 @@ where Err(()) }; let future = async move { Ok(f.await) }; - timer.race(future).await + smol::future::FutureExt::race(timer, future).await } /// Increment the given atomic counter if it is not zero. diff --git a/crates/gpui/src/view.rs b/crates/gpui/src/view.rs index f461e2f7d01a1dc2cdc93cda4f5854c8e958feaf..217971792ee978307a19f7e40374cb337e38a625 100644 --- a/crates/gpui/src/view.rs +++ b/crates/gpui/src/view.rs @@ -205,22 +205,21 @@ impl Element for AnyView { let content_mask = window.content_mask(); let text_style = window.text_style(); - if let Some(mut element_state) = element_state { - if element_state.cache_key.bounds == bounds - && element_state.cache_key.content_mask == content_mask - && element_state.cache_key.text_style == text_style - && !window.dirty_views.contains(&self.entity_id()) - && !window.refreshing - { - let prepaint_start = window.prepaint_index(); - window.reuse_prepaint(element_state.prepaint_range.clone()); - cx.entities - .extend_accessed(&element_state.accessed_entities); - let prepaint_end = window.prepaint_index(); - element_state.prepaint_range = prepaint_start..prepaint_end; - - return (None, element_state); - } + if let Some(mut element_state) = element_state + && element_state.cache_key.bounds == bounds + && element_state.cache_key.content_mask == content_mask + && element_state.cache_key.text_style == text_style + && !window.dirty_views.contains(&self.entity_id()) + && !window.refreshing + { + let prepaint_start = window.prepaint_index(); + window.reuse_prepaint(element_state.prepaint_range.clone()); + cx.entities + .extend_accessed(&element_state.accessed_entities); + let prepaint_end = window.prepaint_index(); + element_state.prepaint_range = prepaint_start..prepaint_end; + + return (None, element_state); } let refreshing = mem::replace(&mut window.refreshing, true); diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 40d3845ff9600f9dcd16fe55b0b27d5e3eddce09..0791dcc621a8a71ef350ad32f8cf9ab87fad8db2 100644 --- a/crates/gpui/src/window.rs +++ 
b/crates/gpui/src/window.rs @@ -243,14 +243,14 @@ impl FocusId { pub fn contains_focused(&self, window: &Window, cx: &App) -> bool { window .focused(cx) - .map_or(false, |focused| self.contains(focused.id, window)) + .is_some_and(|focused| self.contains(focused.id, window)) } /// Obtains whether the element associated with this handle is contained within the /// focused element or is itself focused. pub fn within_focused(&self, window: &Window, cx: &App) -> bool { let focused = window.focused(cx); - focused.map_or(false, |focused| focused.id.contains(*self, window)) + focused.is_some_and(|focused| focused.id.contains(*self, window)) } /// Obtains whether this handle contains the given handle in the most recently rendered frame. @@ -504,7 +504,7 @@ impl HitboxId { return true; } } - return false; + false } /// Checks if the hitbox with this ID contains the mouse and should handle scroll events. @@ -634,7 +634,7 @@ impl TooltipId { window .tooltip_bounds .as_ref() - .map_or(false, |tooltip_bounds| { + .is_some_and(|tooltip_bounds| { tooltip_bounds.id == *self && tooltip_bounds.bounds.contains(&window.mouse_position()) }) @@ -2453,7 +2453,7 @@ impl Window { /// time. pub fn get_asset(&mut self, source: &A::Source, cx: &mut App) -> Option { let (task, _) = cx.fetch_asset::(source); - task.clone().now_or_never() + task.now_or_never() } /// Obtain the current element offset. This method should only be called during the /// prepaint phase of element drawing. @@ -2814,7 +2814,7 @@ impl Window { content_mask: content_mask.scale(scale_factor), color: style.color.unwrap_or_default().opacity(element_opacity), thickness: style.thickness.scale(scale_factor), - wavy: style.wavy, + wavy: if style.wavy { 1 } else { 0 }, }); } @@ -2845,7 +2845,7 @@ impl Window { content_mask: content_mask.scale(scale_factor), thickness: style.thickness.scale(scale_factor), color: style.color.unwrap_or_default().opacity(opacity), - wavy: false, + wavy: 0, }); } @@ -3044,7 +3044,7 @@ impl Window { let tile = self .sprite_atlas - .get_or_insert_with(¶ms.clone().into(), &mut || { + .get_or_insert_with(¶ms.into(), &mut || { Ok(Some(( data.size(frame_index), Cow::Borrowed( @@ -3401,16 +3401,16 @@ impl Window { let focus_id = handle.id; let (subscription, activate) = self.new_focus_listener(Box::new(move |event, window, cx| { - if let Some(blurred_id) = event.previous_focus_path.last().copied() { - if event.is_focus_out(focus_id) { - let event = FocusOutEvent { - blurred: WeakFocusHandle { - id: blurred_id, - handles: Arc::downgrade(&cx.focus_handles), - }, - }; - listener(event, window, cx) - } + if let Some(blurred_id) = event.previous_focus_path.last().copied() + && event.is_focus_out(focus_id) + { + let event = FocusOutEvent { + blurred: WeakFocusHandle { + id: blurred_id, + handles: Arc::downgrade(&cx.focus_handles), + }, + }; + listener(event, window, cx) } true })); @@ -3444,12 +3444,12 @@ impl Window { return true; } - if let Some(input) = keystroke.key_char { - if let Some(mut input_handler) = self.platform_window.take_input_handler() { - input_handler.dispatch_input(&input, self, cx); - self.platform_window.set_input_handler(input_handler); - return true; - } + if let Some(input) = keystroke.key_char + && let Some(mut input_handler) = self.platform_window.take_input_handler() + { + input_handler.dispatch_input(&input, self, cx); + self.platform_window.set_input_handler(input_handler); + return true; } false @@ -3688,7 +3688,8 @@ impl Window { ); if !match_result.to_replay.is_empty() { - 
self.replay_pending_input(match_result.to_replay, cx) + self.replay_pending_input(match_result.to_replay, cx); + cx.propagate_event = true; } if !match_result.pending.is_empty() { @@ -3730,7 +3731,7 @@ impl Window { self.dispatch_keystroke_observers( event, Some(binding.action), - match_result.context_stack.clone(), + match_result.context_stack, cx, ); self.pending_input_changed(cx); @@ -3863,11 +3864,11 @@ impl Window { if !cx.propagate_event { continue 'replay; } - if let Some(input) = replay.keystroke.key_char.as_ref().cloned() { - if let Some(mut input_handler) = self.platform_window.take_input_handler() { - input_handler.dispatch_input(&input, self, cx); - self.platform_window.set_input_handler(input_handler) - } + if let Some(input) = replay.keystroke.key_char.as_ref().cloned() + && let Some(mut input_handler) = self.platform_window.take_input_handler() + { + input_handler.dispatch_input(&input, self, cx); + self.platform_window.set_input_handler(input_handler) } } } @@ -4308,15 +4309,15 @@ impl Window { cx: &mut App, f: impl FnOnce(&mut Option, &mut Self) -> R, ) -> R { - if let Some(inspector_id) = _inspector_id { - if let Some(inspector) = &self.inspector { - let inspector = inspector.clone(); - let active_element_id = inspector.read(cx).active_element_id(); - if Some(inspector_id) == active_element_id { - return inspector.update(cx, |inspector, _cx| { - inspector.with_active_element_state(self, f) - }); - } + if let Some(inspector_id) = _inspector_id + && let Some(inspector) = &self.inspector + { + let inspector = inspector.clone(); + let active_element_id = inspector.read(cx).active_element_id(); + if Some(inspector_id) == active_element_id { + return inspector.update(cx, |inspector, _cx| { + inspector.with_active_element_state(self, f) + }); } } f(&mut None, self) @@ -4388,15 +4389,13 @@ impl Window { if let Some(inspector) = self.inspector.as_ref() { let inspector = inspector.read(cx); if let Some((hitbox_id, _)) = self.hovered_inspector_hitbox(inspector, &self.next_frame) - { - if let Some(hitbox) = self + && let Some(hitbox) = self .next_frame .hitboxes .iter() .find(|hitbox| hitbox.id == hitbox_id) - { - self.paint_quad(crate::fill(hitbox.bounds, crate::rgba(0x61afef4d))); - } + { + self.paint_quad(crate::fill(hitbox.bounds, crate::rgba(0x61afef4d))); } } } @@ -4443,7 +4442,7 @@ impl Window { if let Some((_, inspector_id)) = self.hovered_inspector_hitbox(inspector, &self.rendered_frame) { - inspector.set_active_element_id(inspector_id.clone(), self); + inspector.set_active_element_id(inspector_id, self); } } }); @@ -4467,7 +4466,7 @@ impl Window { } } } - return None; + None } } @@ -4584,7 +4583,7 @@ impl WindowHandle { where C: AppContext, { - cx.read_window(self, |root_view, _cx| root_view.clone()) + cx.read_window(self, |root_view, _cx| root_view) } /// Check if this window is 'active'. 
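Several hunks in this patch (window.rs above, plus earlier ones in style.rs, text_system.rs, and derive_inspector_reflection.rs) replace the `map_or(false, ...)` pattern on `Option`/`Result` with `is_some_and` / `is_ok_and`, which have been stable since Rust 1.70. A minimal sketch of the equivalence follows; the `has_positive` helper is made up for illustration and is not part of the patch:

    // Illustrative helper, not code from this patch.
    // `opt.map_or(false, p)` and `opt.is_some_and(p)` yield the same bool:
    // true only when a value is present and the predicate holds for it.
    fn has_positive(value: Option<i32>) -> bool {
        // old spelling: value.map_or(false, |v| v > 0)
        value.is_some_and(|v| v > 0)
    }

    fn main() {
        assert!(has_positive(Some(3)));
        assert!(!has_positive(Some(-1)));
        assert!(!has_positive(None));
    }

The newer spelling drops the redundant `false` default and reads as the predicate check it is, which is presumably why the patch applies it consistently across these files.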
diff --git a/crates/gpui_macros/src/derive_inspector_reflection.rs b/crates/gpui_macros/src/derive_inspector_reflection.rs index fa22f95f9a1c274d193a6985a84bf3cdecfcc17f..9c1cb503a87e5f726ba27d1868a6c053b36c6731 100644 --- a/crates/gpui_macros/src/derive_inspector_reflection.rs +++ b/crates/gpui_macros/src/derive_inspector_reflection.rs @@ -160,16 +160,14 @@ fn extract_doc_comment(attrs: &[Attribute]) -> Option { let mut doc_lines = Vec::new(); for attr in attrs { - if attr.path().is_ident("doc") { - if let Meta::NameValue(meta) = &attr.meta { - if let Expr::Lit(expr_lit) = &meta.value { - if let Lit::Str(lit_str) = &expr_lit.lit { - let line = lit_str.value(); - let line = line.strip_prefix(' ').unwrap_or(&line); - doc_lines.push(line.to_string()); - } - } - } + if attr.path().is_ident("doc") + && let Meta::NameValue(meta) = &attr.meta + && let Expr::Lit(expr_lit) = &meta.value + && let Lit::Str(lit_str) = &expr_lit.lit + { + let line = lit_str.value(); + let line = line.strip_prefix(' ').unwrap_or(&line); + doc_lines.push(line.to_string()); } } @@ -191,7 +189,7 @@ fn extract_cfg_attributes(attrs: &[Attribute]) -> Vec { fn is_called_from_gpui_crate(_span: Span) -> bool { // Check if we're being called from within the gpui crate by examining the call site // This is a heuristic approach - we check if the current crate name is "gpui" - std::env::var("CARGO_PKG_NAME").map_or(false, |name| name == "gpui") + std::env::var("CARGO_PKG_NAME").is_ok_and(|name| name == "gpui") } struct MacroExpander; diff --git a/crates/gpui_macros/src/gpui_macros.rs b/crates/gpui_macros/src/gpui_macros.rs index 3a58af67052d06f108b4b9c87d52fc358405466e..0f1365be77ec221d9061f588f84ff6acab3c32ab 100644 --- a/crates/gpui_macros/src/gpui_macros.rs +++ b/crates/gpui_macros/src/gpui_macros.rs @@ -172,7 +172,7 @@ pub fn box_shadow_style_methods(input: TokenStream) -> TokenStream { /// - `#[gpui::test(iterations = 5)]` runs five times, providing as seed the values in the range `0..5`. /// - `#[gpui::test(retries = 3)]` runs up to four times if it fails to try and make it pass. /// - `#[gpui::test(on_failure = "crate::test::report_failure")]` will call the specified function after the -/// tests fail so that you can write out more detail about the failure. +/// tests fail so that you can write out more detail about the failure. /// /// You can combine `iterations = ...` with `seeds(...)`: /// - `#[gpui::test(iterations = 5, seed = 10)]` is equivalent to `#[gpui::test(seeds(0, 1, 2, 3, 4, 10))]`. diff --git a/crates/gpui_macros/src/test.rs b/crates/gpui_macros/src/test.rs index 2c5214989716efc2ed4b7f6c590d07c25fc7110d..648d3499edb0a8f13031092e37d761368363af08 100644 --- a/crates/gpui_macros/src/test.rs +++ b/crates/gpui_macros/src/test.rs @@ -73,7 +73,7 @@ impl Parse for Args { (Meta::NameValue(meta), "seed") => { seeds = vec![parse_usize_from_expr(&meta.value)? 
as u64] } - (Meta::List(list), "seeds") => seeds = parse_u64_array(&list)?, + (Meta::List(list), "seeds") => seeds = parse_u64_array(list)?, (Meta::Path(_), _) => { return Err(syn::Error::new(meta.span(), "invalid path argument")); } @@ -86,7 +86,7 @@ impl Parse for Args { Ok(Args { seeds, max_retries, - max_iterations: max_iterations, + max_iterations, on_failure_fn_name, }) } @@ -152,27 +152,28 @@ fn generate_test_function( } _ => {} } - } else if let Type::Reference(ty) = &*arg.ty { - if let Type::Path(ty) = &*ty.elem { - let last_segment = ty.path.segments.last(); - if let Some("TestAppContext") = - last_segment.map(|s| s.ident.to_string()).as_deref() - { - let cx_varname = format_ident!("cx_{}", ix); - cx_vars.extend(quote!( - let mut #cx_varname = gpui::TestAppContext::build( - dispatcher.clone(), - Some(stringify!(#outer_fn_name)), - ); - )); - cx_teardowns.extend(quote!( - dispatcher.run_until_parked(); - #cx_varname.quit(); - dispatcher.run_until_parked(); - )); - inner_fn_args.extend(quote!(&mut #cx_varname,)); - continue; - } + } else if let Type::Reference(ty) = &*arg.ty + && let Type::Path(ty) = &*ty.elem + { + let last_segment = ty.path.segments.last(); + if let Some("TestAppContext") = + last_segment.map(|s| s.ident.to_string()).as_deref() + { + let cx_varname = format_ident!("cx_{}", ix); + cx_vars.extend(quote!( + let mut #cx_varname = gpui::TestAppContext::build( + dispatcher.clone(), + Some(stringify!(#outer_fn_name)), + ); + )); + cx_teardowns.extend(quote!( + dispatcher.run_until_parked(); + #cx_varname.executor().forbid_parking(); + #cx_varname.quit(); + dispatcher.run_until_parked(); + )); + inner_fn_args.extend(quote!(&mut #cx_varname,)); + continue; } } } @@ -214,47 +215,48 @@ fn generate_test_function( inner_fn_args.extend(quote!(rand::SeedableRng::seed_from_u64(_seed),)); continue; } - } else if let Type::Reference(ty) = &*arg.ty { - if let Type::Path(ty) = &*ty.elem { - let last_segment = ty.path.segments.last(); - match last_segment.map(|s| s.ident.to_string()).as_deref() { - Some("App") => { - let cx_varname = format_ident!("cx_{}", ix); - let cx_varname_lock = format_ident!("cx_{}_lock", ix); - cx_vars.extend(quote!( - let mut #cx_varname = gpui::TestAppContext::build( - dispatcher.clone(), - Some(stringify!(#outer_fn_name)) - ); - let mut #cx_varname_lock = #cx_varname.app.borrow_mut(); - )); - inner_fn_args.extend(quote!(&mut #cx_varname_lock,)); - cx_teardowns.extend(quote!( + } else if let Type::Reference(ty) = &*arg.ty + && let Type::Path(ty) = &*ty.elem + { + let last_segment = ty.path.segments.last(); + match last_segment.map(|s| s.ident.to_string()).as_deref() { + Some("App") => { + let cx_varname = format_ident!("cx_{}", ix); + let cx_varname_lock = format_ident!("cx_{}_lock", ix); + cx_vars.extend(quote!( + let mut #cx_varname = gpui::TestAppContext::build( + dispatcher.clone(), + Some(stringify!(#outer_fn_name)) + ); + let mut #cx_varname_lock = #cx_varname.app.borrow_mut(); + )); + inner_fn_args.extend(quote!(&mut #cx_varname_lock,)); + cx_teardowns.extend(quote!( drop(#cx_varname_lock); dispatcher.run_until_parked(); - #cx_varname.update(|cx| { cx.quit() }); - dispatcher.run_until_parked(); - )); - continue; - } - Some("TestAppContext") => { - let cx_varname = format_ident!("cx_{}", ix); - cx_vars.extend(quote!( - let mut #cx_varname = gpui::TestAppContext::build( - dispatcher.clone(), - Some(stringify!(#outer_fn_name)) - ); - )); - cx_teardowns.extend(quote!( - dispatcher.run_until_parked(); - #cx_varname.quit(); + #cx_varname.update(|cx| { 
cx.background_executor().forbid_parking(); cx.quit(); }); dispatcher.run_until_parked(); )); - inner_fn_args.extend(quote!(&mut #cx_varname,)); - continue; - } - _ => {} + continue; } + Some("TestAppContext") => { + let cx_varname = format_ident!("cx_{}", ix); + cx_vars.extend(quote!( + let mut #cx_varname = gpui::TestAppContext::build( + dispatcher.clone(), + Some(stringify!(#outer_fn_name)) + ); + )); + cx_teardowns.extend(quote!( + dispatcher.run_until_parked(); + #cx_varname.executor().forbid_parking(); + #cx_varname.quit(); + dispatcher.run_until_parked(); + )); + inner_fn_args.extend(quote!(&mut #cx_varname,)); + continue; + } + _ => {} } } } diff --git a/crates/gpui_macros/tests/derive_inspector_reflection.rs b/crates/gpui_macros/tests/derive_inspector_reflection.rs index 522c0a62c469cd181c44c465547a8c19c4d04f69..a0adcb7801e55d7272191a1e4e831b2c9c6b115c 100644 --- a/crates/gpui_macros/tests/derive_inspector_reflection.rs +++ b/crates/gpui_macros/tests/derive_inspector_reflection.rs @@ -34,13 +34,6 @@ trait Transform: Clone { /// Adds one to the value fn add_one(self) -> Self; - - /// cfg attributes are respected - #[cfg(all())] - fn cfg_included(self) -> Self; - - #[cfg(any())] - fn cfg_omitted(self) -> Self; } #[derive(Debug, Clone, PartialEq)] @@ -70,10 +63,6 @@ impl Transform for Number { fn add_one(self) -> Self { Number(self.0 + 1) } - - fn cfg_included(self) -> Self { - Number(self.0) - } } #[test] @@ -83,14 +72,13 @@ fn test_derive_inspector_reflection() { // Get all methods that match the pattern fn(self) -> Self or fn(mut self) -> Self let methods = methods::(); - assert_eq!(methods.len(), 6); + assert_eq!(methods.len(), 5); let method_names: Vec<_> = methods.iter().map(|m| m.name).collect(); assert!(method_names.contains(&"double")); assert!(method_names.contains(&"triple")); assert!(method_names.contains(&"increment")); assert!(method_names.contains(&"quadruple")); assert!(method_names.contains(&"add_one")); - assert!(method_names.contains(&"cfg_included")); // Invoke methods by name let num = Number(5); @@ -106,9 +94,7 @@ fn test_derive_inspector_reflection() { .invoke(num.clone()); assert_eq!(incremented, Number(6)); - let quadrupled = find_method::("quadruple") - .unwrap() - .invoke(num.clone()); + let quadrupled = find_method::("quadruple").unwrap().invoke(num); assert_eq!(quadrupled, Number(20)); // Try to invoke a non-existent method diff --git a/crates/html_to_markdown/src/markdown.rs b/crates/html_to_markdown/src/markdown.rs index b9ffbac79c6b6af64222e6447392aa3a75440dda..bb3b3563bcdff8692c80b1b79e7c94d4184bf1cb 100644 --- a/crates/html_to_markdown/src/markdown.rs +++ b/crates/html_to_markdown/src/markdown.rs @@ -34,15 +34,14 @@ impl HandleTag for ParagraphHandler { tag: &HtmlElement, writer: &mut MarkdownWriter, ) -> StartTagOutcome { - if tag.is_inline() && writer.is_inside("p") { - if let Some(parent) = writer.current_element_stack().iter().last() { - if !(parent.is_inline() - || writer.markdown.ends_with(' ') - || writer.markdown.ends_with('\n')) - { - writer.push_str(" "); - } - } + if tag.is_inline() + && writer.is_inside("p") + && let Some(parent) = writer.current_element_stack().iter().last() + && !(parent.is_inline() + || writer.markdown.ends_with(' ') + || writer.markdown.ends_with('\n')) + { + writer.push_str(" "); } if tag.tag() == "p" { diff --git a/crates/http_client/src/async_body.rs b/crates/http_client/src/async_body.rs index 473849f3cdca785a802590a60cce922c9ee0b5f9..6b99a54a7d941c290f2680bc2a599bc63251e24b 100644 --- 
a/crates/http_client/src/async_body.rs
+++ b/crates/http_client/src/async_body.rs
@@ -40,7 +40,7 @@ impl AsyncBody {
     }
 
     pub fn from_bytes(bytes: Bytes) -> Self {
-        Self(Inner::Bytes(Cursor::new(bytes.clone())))
+        Self(Inner::Bytes(Cursor::new(bytes)))
     }
 }
diff --git a/crates/http_client/src/github.rs b/crates/http_client/src/github.rs
index a19c13b0ff5ef9fec3c1e6223b63c62ed84f03dc..32efed8e727330d3ac1c2fb6d8ea5d57fdd66dd4 100644
--- a/crates/http_client/src/github.rs
+++ b/crates/http_client/src/github.rs
@@ -71,11 +71,19 @@ pub async fn latest_github_release(
         }
     };
 
-    releases
+    let mut release = releases
         .into_iter()
         .filter(|release| !require_assets || !release.assets.is_empty())
         .find(|release| release.pre_release == pre_release)
-        .context("finding a prerelease")
+        .context("finding a prerelease")?;
+    release.assets.iter_mut().for_each(|asset| {
+        if let Some(digest) = &mut asset.digest
+            && let Some(stripped) = digest.strip_prefix("sha256:")
+        {
+            *digest = stripped.to_owned();
+        }
+    });
+    Ok(release)
 }
 
 pub async fn get_release_by_tag_name(
diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs
index a7f75b0962561ac713e57f9ad26cb64ed82f8003..62468573ed29687c0436e98a0174baa515b0ee3d 100644
--- a/crates/http_client/src/http_client.rs
+++ b/crates/http_client/src/http_client.rs
@@ -435,8 +435,7 @@ impl HttpClient for FakeHttpClient {
         &self,
         req: Request<AsyncBody>,
     ) -> BoxFuture<'static, anyhow::Result<Response<AsyncBody>>> {
-        let future = (self.handler.lock().as_ref().unwrap())(req);
-        future
+        ((self.handler.lock().as_ref().unwrap())(req)) as _
     }
 
     fn user_agent(&self) -> Option<&HeaderValue> {
diff --git a/crates/icons/README.md b/crates/icons/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..e340a00277db558b4bb13b212d53188c0c8fbe5a
--- /dev/null
+++ b/crates/icons/README.md
@@ -0,0 +1,29 @@
+# Zed Icons
+
+## Guidelines
+
+Icons are a big part of Zed, and they're how we convey hundreds of actions without relying on labeled buttons.
+When introducing a new icon, it's important to ensure consistency with the existing set, which follows these guidelines:
+
+1. The SVG view box should be 16x16.
+2. For outlined icons, use a 1.2px stroke width.
+3. Not all icons are mathematically aligned; there's quite a bit of optical adjustment. However, try to keep the icon within an internal 12x12 bounding box as much as possible while ensuring proper visibility.
+4. Use the `filled` and `outlined` terminology when introducing icons that will have these two variants.
+5. Icons that are deeply contextual may have the feature context as their name prefix. For example, `ToolWeb`, `ReplPlay`, `DebugStepInto`, etc.
+6. Avoid complex layer structures in the icon SVG, like clipping masks and similar elements. When the shape becomes too complex, we recommend running the SVG through [SVGOMG](https://jakearchibald.github.io/svgomg/) to clean it up.
+
+## Sourcing
+
+Most icons are created by sourcing them from [Lucide](https://lucide.dev/).
+Then, they're modified, adjusted, cleaned up, and simplified depending on their use and overall fit with Zed.
+
+Sometimes, we may use other sources like [Phosphor](https://phosphoricons.com/), but we also design many icons completely from scratch.
+
+## Contributing
+
+To introduce a new icon, add the `.svg` file to the `assets/icon` directory and then add its corresponding item to the `icons.rs` file within the `crates` directory.
+
+- SVG files in the assets folder follow a snake_case name format.
+- Icons in the `icons.rs` file follow the PascalCase name format. + +Make sure to tag a member of Zed's design team so we can review and adjust any newly introduced icon. diff --git a/crates/icons/src/icons.rs b/crates/icons/src/icons.rs index 12805e62e061ccd7e91dc0210e68b87752f7ed6c..38f02c2206b3a876b68585d8961f0a7e679a8f32 100644 --- a/crates/icons/src/icons.rs +++ b/crates/icons/src/icons.rs @@ -28,16 +28,12 @@ pub enum IconName { ArrowCircle, ArrowDown, ArrowDown10, - ArrowDownFromLine, ArrowDownRight, ArrowLeft, ArrowRight, ArrowRightLeft, ArrowUp, - ArrowUpAlt, - ArrowUpFromLine, ArrowUpRight, - ArrowUpRightAlt, AudioOff, AudioOn, Backspace, @@ -51,28 +47,22 @@ pub enum IconName { BoltFilled, Book, BookCopy, - BugOff, CaseSensitive, Chat, Check, CheckDouble, ChevronDown, - /// This chevron indicates a popover menu. - ChevronDownSmall, ChevronLeft, ChevronRight, ChevronUp, ChevronUpDown, Circle, - CircleOff, CircleHelp, Close, - Cloud, CloudDownload, Code, Cog, Command, - Context, Control, Copilot, CopilotDisabled, @@ -93,16 +83,12 @@ pub enum IconName { DebugIgnoreBreakpoints, DebugLogBreakpoint, DebugPause, - DebugRestart, DebugStepBack, DebugStepInto, DebugStepOut, DebugStepOver, - DebugStop, - Delete, Diff, Disconnected, - DocumentText, Download, EditorAtom, EditorCursor, @@ -113,59 +99,51 @@ pub enum IconName { Ellipsis, EllipsisVertical, Envelope, - Equal, Eraser, Escape, Exit, ExpandDown, ExpandUp, ExpandVertical, - ExternalLink, Eye, File, FileCode, - FileCreate, FileDiff, FileDoc, FileGeneric, FileGit, FileLock, + FileMarkdown, FileRust, - FileSearch, - FileText, + FileTextFilled, + FileTextOutlined, FileToml, FileTree, Filter, Flame, Folder, FolderOpen, - FolderX, + FolderSearch, Font, FontSize, FontWeight, ForwardArrow, - Function, GenericClose, GenericMaximize, GenericMinimize, GenericRestore, GitBranch, - GitBranchSmall, + GitBranchAlt, Github, - Globe, - Hammer, Hash, HistoryRerun, Image, Indicator, Info, - InlayHint, + Json, Keyboard, - Layout, Library, - LightBulb, LineHeight, - Link, ListCollapse, ListTodo, ListTree, @@ -173,35 +151,29 @@ pub enum IconName { LoadCircle, LocationEdit, LockOutlined, - LspDebug, - LspRestart, - LspStop, MagnifyingGlass, - MailOpen, Maximize, Menu, MenuAlt, + MenuAltTemp, Mic, MicMute, Minimize, + Notepad, Option, PageDown, PageUp, - PanelLeft, - PanelRight, Pencil, Person, - PersonCircle, - PhoneIncoming, Pin, PlayOutlined, PlayFilled, Plus, - PocketKnife, Power, Public, PullRequest, Quote, + Reader, RefreshTitle, Regex, ReplNeutral, @@ -213,28 +185,18 @@ pub enum IconName { Return, RotateCcw, RotateCw, - Route, - Save, Scissors, Screen, - ScrollText, - SearchSelection, SelectAll, Send, Server, Settings, - SettingsAlt, ShieldCheck, Shift, Slash, - SlashSquare, Sliders, - SlidersVertical, - Snip, Space, Sparkle, - SparkleAlt, - SparkleFilled, Split, SplitAlt, SquareDot, @@ -243,7 +205,6 @@ pub enum IconName { Star, StarFilled, Stop, - StopFilled, Supermaven, SupermavenDisabled, SupermavenError, @@ -279,18 +240,15 @@ pub enum IconName { TriangleRight, Undo, Unpin, - Update, UserCheck, UserGroup, UserRoundPen, - Visible, - Wand, Warning, WholeWord, - X, XCircle, + XCircleFilled, + ZedAgent, ZedAssistant, - ZedAssistantFilled, ZedBurnMode, ZedBurnModeOn, ZedMcpCustom, diff --git a/crates/indexed_docs/Cargo.toml b/crates/indexed_docs/Cargo.toml deleted file mode 100644 index eb269ad939b59394f12ccceba941585f6dec3ca7..0000000000000000000000000000000000000000 --- a/crates/indexed_docs/Cargo.toml +++ /dev/null @@ -1,38 +0,0 @@ -[package] -name = 
"indexed_docs" -version = "0.1.0" -edition.workspace = true -publish.workspace = true -license = "GPL-3.0-or-later" - -[lints] -workspace = true - -[lib] -path = "src/indexed_docs.rs" - -[dependencies] -anyhow.workspace = true -async-trait.workspace = true -cargo_metadata.workspace = true -collections.workspace = true -derive_more.workspace = true -extension.workspace = true -fs.workspace = true -futures.workspace = true -fuzzy.workspace = true -gpui.workspace = true -heed.workspace = true -html_to_markdown.workspace = true -http_client.workspace = true -indexmap.workspace = true -parking_lot.workspace = true -paths.workspace = true -serde.workspace = true -strum.workspace = true -util.workspace = true -workspace-hack.workspace = true - -[dev-dependencies] -indoc.workspace = true -pretty_assertions.workspace = true diff --git a/crates/indexed_docs/src/extension_indexed_docs_provider.rs b/crates/indexed_docs/src/extension_indexed_docs_provider.rs deleted file mode 100644 index c77ea4066d2b46f0377d7f64c7d514fd3a95872d..0000000000000000000000000000000000000000 --- a/crates/indexed_docs/src/extension_indexed_docs_provider.rs +++ /dev/null @@ -1,81 +0,0 @@ -use std::path::PathBuf; -use std::sync::Arc; - -use anyhow::Result; -use async_trait::async_trait; -use extension::{Extension, ExtensionHostProxy, ExtensionIndexedDocsProviderProxy}; -use gpui::App; - -use crate::{ - IndexedDocsDatabase, IndexedDocsProvider, IndexedDocsRegistry, PackageName, ProviderId, -}; - -pub fn init(cx: &mut App) { - let proxy = ExtensionHostProxy::default_global(cx); - proxy.register_indexed_docs_provider_proxy(IndexedDocsRegistryProxy { - indexed_docs_registry: IndexedDocsRegistry::global(cx), - }); -} - -struct IndexedDocsRegistryProxy { - indexed_docs_registry: Arc, -} - -impl ExtensionIndexedDocsProviderProxy for IndexedDocsRegistryProxy { - fn register_indexed_docs_provider(&self, extension: Arc, provider_id: Arc) { - self.indexed_docs_registry - .register_provider(Box::new(ExtensionIndexedDocsProvider::new( - extension, - ProviderId(provider_id), - ))); - } - - fn unregister_indexed_docs_provider(&self, provider_id: Arc) { - self.indexed_docs_registry - .unregister_provider(&ProviderId(provider_id)); - } -} - -pub struct ExtensionIndexedDocsProvider { - extension: Arc, - id: ProviderId, -} - -impl ExtensionIndexedDocsProvider { - pub fn new(extension: Arc, id: ProviderId) -> Self { - Self { extension, id } - } -} - -#[async_trait] -impl IndexedDocsProvider for ExtensionIndexedDocsProvider { - fn id(&self) -> ProviderId { - self.id.clone() - } - - fn database_path(&self) -> PathBuf { - let mut database_path = PathBuf::from(self.extension.work_dir().as_ref()); - database_path.push("docs"); - database_path.push(format!("{}.0.mdb", self.id)); - - database_path - } - - async fn suggest_packages(&self) -> Result> { - let packages = self - .extension - .suggest_docs_packages(self.id.0.clone()) - .await?; - - Ok(packages - .into_iter() - .map(|package| PackageName::from(package.as_str())) - .collect()) - } - - async fn index(&self, package: PackageName, database: Arc) -> Result<()> { - self.extension - .index_docs(self.id.0.clone(), package.as_ref().into(), database) - .await - } -} diff --git a/crates/indexed_docs/src/indexed_docs.rs b/crates/indexed_docs/src/indexed_docs.rs deleted file mode 100644 index 97538329d4d6265c7587209c679f4e3fa041ce35..0000000000000000000000000000000000000000 --- a/crates/indexed_docs/src/indexed_docs.rs +++ /dev/null @@ -1,16 +0,0 @@ -mod extension_indexed_docs_provider; -mod providers; 
-mod registry; -mod store; - -use gpui::App; - -pub use crate::extension_indexed_docs_provider::ExtensionIndexedDocsProvider; -pub use crate::providers::rustdoc::*; -pub use crate::registry::*; -pub use crate::store::*; - -pub fn init(cx: &mut App) { - IndexedDocsRegistry::init_global(cx); - extension_indexed_docs_provider::init(cx); -} diff --git a/crates/indexed_docs/src/providers.rs b/crates/indexed_docs/src/providers.rs deleted file mode 100644 index c6505a2ab667724c83d3f7bed3c1ca16a1423bc5..0000000000000000000000000000000000000000 --- a/crates/indexed_docs/src/providers.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod rustdoc; diff --git a/crates/indexed_docs/src/providers/rustdoc.rs b/crates/indexed_docs/src/providers/rustdoc.rs deleted file mode 100644 index ac6dc3a10bb3f70f7329b399287124e0417dc0f4..0000000000000000000000000000000000000000 --- a/crates/indexed_docs/src/providers/rustdoc.rs +++ /dev/null @@ -1,291 +0,0 @@ -mod item; -mod to_markdown; - -use cargo_metadata::MetadataCommand; -use futures::future::BoxFuture; -pub use item::*; -use parking_lot::RwLock; -pub use to_markdown::convert_rustdoc_to_markdown; - -use std::collections::BTreeSet; -use std::path::PathBuf; -use std::sync::{Arc, LazyLock}; -use std::time::{Duration, Instant}; - -use anyhow::{Context as _, Result, bail}; -use async_trait::async_trait; -use collections::{HashSet, VecDeque}; -use fs::Fs; -use futures::{AsyncReadExt, FutureExt}; -use http_client::{AsyncBody, HttpClient, HttpClientWithUrl}; - -use crate::{IndexedDocsDatabase, IndexedDocsProvider, PackageName, ProviderId}; - -#[derive(Debug)] -struct RustdocItemWithHistory { - pub item: RustdocItem, - #[cfg(debug_assertions)] - pub history: Vec, -} - -pub struct LocalRustdocProvider { - fs: Arc, - cargo_workspace_root: PathBuf, -} - -impl LocalRustdocProvider { - pub fn id() -> ProviderId { - ProviderId("rustdoc".into()) - } - - pub fn new(fs: Arc, cargo_workspace_root: PathBuf) -> Self { - Self { - fs, - cargo_workspace_root, - } - } -} - -#[async_trait] -impl IndexedDocsProvider for LocalRustdocProvider { - fn id(&self) -> ProviderId { - Self::id() - } - - fn database_path(&self) -> PathBuf { - paths::data_dir().join("docs/rust/rustdoc-db.1.mdb") - } - - async fn suggest_packages(&self) -> Result> { - static WORKSPACE_CRATES: LazyLock, Instant)>>> = - LazyLock::new(|| RwLock::new(None)); - - if let Some((crates, fetched_at)) = &*WORKSPACE_CRATES.read() { - if fetched_at.elapsed() < Duration::from_secs(300) { - return Ok(crates.iter().cloned().collect()); - } - } - - let workspace = MetadataCommand::new() - .manifest_path(self.cargo_workspace_root.join("Cargo.toml")) - .exec() - .context("failed to load cargo metadata")?; - - let workspace_crates = workspace - .packages - .into_iter() - .map(|package| PackageName::from(package.name.as_str())) - .collect::>(); - - *WORKSPACE_CRATES.write() = Some((workspace_crates.clone(), Instant::now())); - - Ok(workspace_crates.into_iter().collect()) - } - - async fn index(&self, package: PackageName, database: Arc) -> Result<()> { - index_rustdoc(package, database, { - move |crate_name, item| { - let fs = self.fs.clone(); - let cargo_workspace_root = self.cargo_workspace_root.clone(); - let crate_name = crate_name.clone(); - let item = item.cloned(); - async move { - let target_doc_path = cargo_workspace_root.join("target/doc"); - let mut local_cargo_doc_path = target_doc_path.join(crate_name.as_ref().replace('-', "_")); - - if !fs.is_dir(&local_cargo_doc_path).await { - let cargo_doc_exists_at_all = 
fs.is_dir(&target_doc_path).await; - if cargo_doc_exists_at_all { - bail!( - "no docs directory for '{crate_name}'. if this is a valid crate name, try running `cargo doc`" - ); - } else { - bail!("no cargo doc directory. run `cargo doc`"); - } - } - - if let Some(item) = item { - local_cargo_doc_path.push(item.url_path()); - } else { - local_cargo_doc_path.push("index.html"); - } - - let Ok(contents) = fs.load(&local_cargo_doc_path).await else { - return Ok(None); - }; - - Ok(Some(contents)) - } - .boxed() - } - }) - .await - } -} - -pub struct DocsDotRsProvider { - http_client: Arc, -} - -impl DocsDotRsProvider { - pub fn id() -> ProviderId { - ProviderId("docs-rs".into()) - } - - pub fn new(http_client: Arc) -> Self { - Self { http_client } - } -} - -#[async_trait] -impl IndexedDocsProvider for DocsDotRsProvider { - fn id(&self) -> ProviderId { - Self::id() - } - - fn database_path(&self) -> PathBuf { - paths::data_dir().join("docs/rust/docs-rs-db.1.mdb") - } - - async fn suggest_packages(&self) -> Result> { - static POPULAR_CRATES: LazyLock> = LazyLock::new(|| { - include_str!("./rustdoc/popular_crates.txt") - .lines() - .filter(|line| !line.starts_with('#')) - .map(|line| PackageName::from(line.trim())) - .collect() - }); - - Ok(POPULAR_CRATES.clone()) - } - - async fn index(&self, package: PackageName, database: Arc) -> Result<()> { - index_rustdoc(package, database, { - move |crate_name, item| { - let http_client = self.http_client.clone(); - let crate_name = crate_name.clone(); - let item = item.cloned(); - async move { - let version = "latest"; - let path = format!( - "{crate_name}/{version}/{crate_name}{item_path}", - item_path = item - .map(|item| format!("/{}", item.url_path())) - .unwrap_or_default() - ); - - let mut response = http_client - .get( - &format!("https://docs.rs/{path}"), - AsyncBody::default(), - true, - ) - .await?; - - let mut body = Vec::new(); - response - .body_mut() - .read_to_end(&mut body) - .await - .context("error reading docs.rs response body")?; - - if response.status().is_client_error() { - let text = String::from_utf8_lossy(body.as_slice()); - bail!( - "status error {}, response: {text:?}", - response.status().as_u16() - ); - } - - Ok(Some(String::from_utf8(body)?)) - } - .boxed() - } - }) - .await - } -} - -async fn index_rustdoc( - package: PackageName, - database: Arc, - fetch_page: impl Fn( - &PackageName, - Option<&RustdocItem>, - ) -> BoxFuture<'static, Result>> - + Send - + Sync, -) -> Result<()> { - let Some(package_root_content) = fetch_page(&package, None).await? else { - return Ok(()); - }; - - let (crate_root_markdown, items) = - convert_rustdoc_to_markdown(package_root_content.as_bytes())?; - - database - .insert(package.to_string(), crate_root_markdown) - .await?; - - let mut seen_items = HashSet::from_iter(items.clone()); - let mut items_to_visit: VecDeque = - VecDeque::from_iter(items.into_iter().map(|item| RustdocItemWithHistory { - item, - #[cfg(debug_assertions)] - history: Vec::new(), - })); - - while let Some(item_with_history) = items_to_visit.pop_front() { - let item = &item_with_history.item; - - let Some(result) = fetch_page(&package, Some(item)).await.with_context(|| { - #[cfg(debug_assertions)] - { - format!( - "failed to fetch {item:?}: {history:?}", - history = item_with_history.history - ) - } - - #[cfg(not(debug_assertions))] - { - format!("failed to fetch {item:?}") - } - })? 
- else { - continue; - }; - - let (markdown, referenced_items) = convert_rustdoc_to_markdown(result.as_bytes())?; - - database - .insert(format!("{package}::{}", item.display()), markdown) - .await?; - - let parent_item = item; - for mut item in referenced_items { - if seen_items.contains(&item) { - continue; - } - - seen_items.insert(item.clone()); - - item.path.extend(parent_item.path.clone()); - if parent_item.kind == RustdocItemKind::Mod { - item.path.push(parent_item.name.clone()); - } - - items_to_visit.push_back(RustdocItemWithHistory { - #[cfg(debug_assertions)] - history: { - let mut history = item_with_history.history.clone(); - history.push(item.url_path()); - history - }, - item, - }); - } - } - - Ok(()) -} diff --git a/crates/indexed_docs/src/providers/rustdoc/item.rs b/crates/indexed_docs/src/providers/rustdoc/item.rs deleted file mode 100644 index 7d9023ef3e1bcda298e7c1aaabcf9205f333ee23..0000000000000000000000000000000000000000 --- a/crates/indexed_docs/src/providers/rustdoc/item.rs +++ /dev/null @@ -1,82 +0,0 @@ -use std::sync::Arc; - -use serde::{Deserialize, Serialize}; -use strum::EnumIter; - -#[derive( - Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize, EnumIter, -)] -#[serde(rename_all = "snake_case")] -pub enum RustdocItemKind { - Mod, - Macro, - Struct, - Enum, - Constant, - Trait, - Function, - TypeAlias, - AttributeMacro, - DeriveMacro, -} - -impl RustdocItemKind { - pub(crate) const fn class(&self) -> &'static str { - match self { - Self::Mod => "mod", - Self::Macro => "macro", - Self::Struct => "struct", - Self::Enum => "enum", - Self::Constant => "constant", - Self::Trait => "trait", - Self::Function => "fn", - Self::TypeAlias => "type", - Self::AttributeMacro => "attr", - Self::DeriveMacro => "derive", - } - } -} - -#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone)] -pub struct RustdocItem { - pub kind: RustdocItemKind, - /// The item path, up until the name of the item. - pub path: Vec>, - /// The name of the item. - pub name: Arc, -} - -impl RustdocItem { - pub fn display(&self) -> String { - let mut path_segments = self.path.clone(); - path_segments.push(self.name.clone()); - - path_segments.join("::") - } - - pub fn url_path(&self) -> String { - let name = &self.name; - let mut path_components = self.path.clone(); - - match self.kind { - RustdocItemKind::Mod => { - path_components.push(name.clone()); - path_components.push("index.html".into()); - } - RustdocItemKind::Macro - | RustdocItemKind::Struct - | RustdocItemKind::Enum - | RustdocItemKind::Constant - | RustdocItemKind::Trait - | RustdocItemKind::Function - | RustdocItemKind::TypeAlias - | RustdocItemKind::AttributeMacro - | RustdocItemKind::DeriveMacro => { - path_components - .push(format!("{kind}.{name}.html", kind = self.kind.class()).into()); - } - } - - path_components.join("/") - } -} diff --git a/crates/indexed_docs/src/providers/rustdoc/popular_crates.txt b/crates/indexed_docs/src/providers/rustdoc/popular_crates.txt deleted file mode 100644 index ce2c3d51d834ecca05ebdde23c697b19e356a478..0000000000000000000000000000000000000000 --- a/crates/indexed_docs/src/providers/rustdoc/popular_crates.txt +++ /dev/null @@ -1,252 +0,0 @@ -# A list of the most popular Rust crates. -# Sourced from https://lib.rs/std. 
-serde -serde_json -syn -clap -thiserror -rand -log -tokio -anyhow -regex -quote -proc-macro2 -base64 -itertools -chrono -lazy_static -once_cell -libc -reqwest -futures -bitflags -tracing -url -bytes -toml -tempfile -uuid -indexmap -env_logger -num-traits -async-trait -sha2 -hex -tracing-subscriber -http -parking_lot -cfg-if -futures-util -cc -hashbrown -rayon -hyper -getrandom -semver -strum -flate2 -tokio-util -smallvec -criterion -paste -heck -rand_core -nom -rustls -nix -glob -time -byteorder -strum_macros -serde_yaml -wasm-bindgen -ahash -either -num_cpus -rand_chacha -prost -percent-encoding -pin-project-lite -tokio-stream -bincode -walkdir -bindgen -axum -windows-sys -futures-core -ring -digest -num-bigint -rustls-pemfile -serde_with -crossbeam-channel -tokio-rustls -hmac -fastrand -dirs -zeroize -socket2 -pin-project -tower -derive_more -memchr -toml_edit -static_assertions -pretty_assertions -js-sys -convert_case -unicode-width -pkg-config -itoa -colored -rustc-hash -darling -mime -web-sys -image -bytemuck -which -sha1 -dashmap -arrayvec -fnv -tonic -humantime -libloading -winapi -rustc_version -http-body -indoc -num -home -serde_urlencoded -http-body-util -unicode-segmentation -num-integer -webpki-roots -phf -futures-channel -indicatif -petgraph -ordered-float -strsim -zstd -console -encoding_rs -wasm-bindgen-futures -urlencoding -subtle -crc32fast -slab -rustix -predicates -spin -hyper-rustls -backtrace -rustversion -mio -scopeguard -proc-macro-error -hyper-util -ryu -prost-types -textwrap -memmap2 -zip -zerocopy -generic-array -tar -pyo3 -async-stream -quick-xml -memoffset -csv -crossterm -windows -num_enum -tokio-tungstenite -crossbeam-utils -async-channel -lru -aes -futures-lite -tracing-core -prettyplease -httparse -serde_bytes -tracing-log -tower-service -cargo_metadata -pest -mime_guess -tower-http -data-encoding -native-tls -prost-build -proptest -derivative -serial_test -libm -half -futures-io -bitvec -rustls-native-certs -ureq -object -anstyle -tonic-build -form_urlencoded -num-derive -pest_derive -schemars -proc-macro-crate -rstest -futures-executor -assert_cmd -termcolor -serde_repr -ctrlc -sha3 -clap_complete -flume -mockall -ipnet -aho-corasick -atty -signal-hook -async-std -filetime -num-complex -opentelemetry -cmake -arc-swap -derive_builder -async-recursion -dyn-clone -bumpalo -fs_extra -git2 -sysinfo -shlex -instant -approx -rmp-serde -rand_distr -rustls-pki-types -maplit -sqlx -blake3 -hyper-tls -dotenvy -jsonwebtoken -openssl-sys -crossbeam -camino -winreg -config -rsa -bit-vec -chrono-tz -async-lock -bstr diff --git a/crates/indexed_docs/src/providers/rustdoc/to_markdown.rs b/crates/indexed_docs/src/providers/rustdoc/to_markdown.rs deleted file mode 100644 index 87e3863728c5822c19edf4b1dc79283d95b40481..0000000000000000000000000000000000000000 --- a/crates/indexed_docs/src/providers/rustdoc/to_markdown.rs +++ /dev/null @@ -1,618 +0,0 @@ -use std::cell::RefCell; -use std::io::Read; -use std::rc::Rc; - -use anyhow::Result; -use html_to_markdown::markdown::{ - HeadingHandler, ListHandler, ParagraphHandler, StyledTextHandler, TableHandler, -}; -use html_to_markdown::{ - HandleTag, HandlerOutcome, HtmlElement, MarkdownWriter, StartTagOutcome, TagHandler, - convert_html_to_markdown, -}; -use indexmap::IndexSet; -use strum::IntoEnumIterator; - -use crate::{RustdocItem, RustdocItemKind}; - -/// Converts the provided rustdoc HTML to Markdown. 
-pub fn convert_rustdoc_to_markdown(html: impl Read) -> Result<(String, Vec)> { - let item_collector = Rc::new(RefCell::new(RustdocItemCollector::new())); - - let mut handlers: Vec = vec![ - Rc::new(RefCell::new(ParagraphHandler)), - Rc::new(RefCell::new(HeadingHandler)), - Rc::new(RefCell::new(ListHandler)), - Rc::new(RefCell::new(TableHandler::new())), - Rc::new(RefCell::new(StyledTextHandler)), - Rc::new(RefCell::new(RustdocChromeRemover)), - Rc::new(RefCell::new(RustdocHeadingHandler)), - Rc::new(RefCell::new(RustdocCodeHandler)), - Rc::new(RefCell::new(RustdocItemHandler)), - item_collector.clone(), - ]; - - let markdown = convert_html_to_markdown(html, &mut handlers)?; - - let items = item_collector - .borrow() - .items - .iter() - .cloned() - .collect::>(); - - Ok((markdown, items)) -} - -pub struct RustdocHeadingHandler; - -impl HandleTag for RustdocHeadingHandler { - fn should_handle(&self, _tag: &str) -> bool { - // We're only handling text, so we don't need to visit any tags. - false - } - - fn handle_text(&mut self, text: &str, writer: &mut MarkdownWriter) -> HandlerOutcome { - if writer.is_inside("h1") - || writer.is_inside("h2") - || writer.is_inside("h3") - || writer.is_inside("h4") - || writer.is_inside("h5") - || writer.is_inside("h6") - { - let text = text - .trim_matches(|char| char == '\n' || char == '\r') - .replace('\n', " "); - writer.push_str(&text); - - return HandlerOutcome::Handled; - } - - HandlerOutcome::NoOp - } -} - -pub struct RustdocCodeHandler; - -impl HandleTag for RustdocCodeHandler { - fn should_handle(&self, tag: &str) -> bool { - matches!(tag, "pre" | "code") - } - - fn handle_tag_start( - &mut self, - tag: &HtmlElement, - writer: &mut MarkdownWriter, - ) -> StartTagOutcome { - match tag.tag() { - "code" => { - if !writer.is_inside("pre") { - writer.push_str("`"); - } - } - "pre" => { - let classes = tag.classes(); - let is_rust = classes.iter().any(|class| class == "rust"); - let language = is_rust - .then_some("rs") - .or_else(|| { - classes.iter().find_map(|class| { - if let Some((_, language)) = class.split_once("language-") { - Some(language.trim()) - } else { - None - } - }) - }) - .unwrap_or(""); - - writer.push_str(&format!("\n\n```{language}\n")); - } - _ => {} - } - - StartTagOutcome::Continue - } - - fn handle_tag_end(&mut self, tag: &HtmlElement, writer: &mut MarkdownWriter) { - match tag.tag() { - "code" => { - if !writer.is_inside("pre") { - writer.push_str("`"); - } - } - "pre" => writer.push_str("\n```\n"), - _ => {} - } - } - - fn handle_text(&mut self, text: &str, writer: &mut MarkdownWriter) -> HandlerOutcome { - if writer.is_inside("pre") { - writer.push_str(text); - return HandlerOutcome::Handled; - } - - HandlerOutcome::NoOp - } -} - -const RUSTDOC_ITEM_NAME_CLASS: &str = "item-name"; - -pub struct RustdocItemHandler; - -impl RustdocItemHandler { - /// Returns whether we're currently inside of an `.item-name` element, which - /// rustdoc uses to display Rust items in a list. 
- fn is_inside_item_name(writer: &MarkdownWriter) -> bool { - writer - .current_element_stack() - .iter() - .any(|element| element.has_class(RUSTDOC_ITEM_NAME_CLASS)) - } -} - -impl HandleTag for RustdocItemHandler { - fn should_handle(&self, tag: &str) -> bool { - matches!(tag, "div" | "span") - } - - fn handle_tag_start( - &mut self, - tag: &HtmlElement, - writer: &mut MarkdownWriter, - ) -> StartTagOutcome { - match tag.tag() { - "div" | "span" => { - if Self::is_inside_item_name(writer) && tag.has_class("stab") { - writer.push_str(" ["); - } - } - _ => {} - } - - StartTagOutcome::Continue - } - - fn handle_tag_end(&mut self, tag: &HtmlElement, writer: &mut MarkdownWriter) { - match tag.tag() { - "div" | "span" => { - if tag.has_class(RUSTDOC_ITEM_NAME_CLASS) { - writer.push_str(": "); - } - - if Self::is_inside_item_name(writer) && tag.has_class("stab") { - writer.push_str("]"); - } - } - _ => {} - } - } - - fn handle_text(&mut self, text: &str, writer: &mut MarkdownWriter) -> HandlerOutcome { - if Self::is_inside_item_name(writer) - && !writer.is_inside("span") - && !writer.is_inside("code") - { - writer.push_str(&format!("`{text}`")); - return HandlerOutcome::Handled; - } - - HandlerOutcome::NoOp - } -} - -pub struct RustdocChromeRemover; - -impl HandleTag for RustdocChromeRemover { - fn should_handle(&self, tag: &str) -> bool { - matches!( - tag, - "head" | "script" | "nav" | "summary" | "button" | "a" | "div" | "span" - ) - } - - fn handle_tag_start( - &mut self, - tag: &HtmlElement, - _writer: &mut MarkdownWriter, - ) -> StartTagOutcome { - match tag.tag() { - "head" | "script" | "nav" => return StartTagOutcome::Skip, - "summary" => { - if tag.has_class("hideme") { - return StartTagOutcome::Skip; - } - } - "button" => { - if tag.attr("id").as_deref() == Some("copy-path") { - return StartTagOutcome::Skip; - } - } - "a" => { - if tag.has_any_classes(&["anchor", "doc-anchor", "src"]) { - return StartTagOutcome::Skip; - } - } - "div" | "span" => { - if tag.has_any_classes(&["nav-container", "sidebar-elems", "out-of-band"]) { - return StartTagOutcome::Skip; - } - } - - _ => {} - } - - StartTagOutcome::Continue - } -} - -pub struct RustdocItemCollector { - pub items: IndexSet, -} - -impl RustdocItemCollector { - pub fn new() -> Self { - Self { - items: IndexSet::new(), - } - } - - fn parse_item(tag: &HtmlElement) -> Option { - if tag.tag() != "a" { - return None; - } - - let href = tag.attr("href")?; - if href.starts_with('#') || href.starts_with("https://") || href.starts_with("../") { - return None; - } - - for kind in RustdocItemKind::iter() { - if tag.has_class(kind.class()) { - let mut parts = href.trim_end_matches("/index.html").split('/'); - - if let Some(last_component) = parts.next_back() { - let last_component = match last_component.split_once('#') { - Some((component, _fragment)) => component, - None => last_component, - }; - - let name = last_component - .trim_start_matches(&format!("{}.", kind.class())) - .trim_end_matches(".html"); - - return Some(RustdocItem { - kind, - name: name.into(), - path: parts.map(Into::into).collect(), - }); - } - } - } - - None - } -} - -impl HandleTag for RustdocItemCollector { - fn should_handle(&self, tag: &str) -> bool { - tag == "a" - } - - fn handle_tag_start( - &mut self, - tag: &HtmlElement, - writer: &mut MarkdownWriter, - ) -> StartTagOutcome { - if tag.tag() == "a" { - let is_reexport = writer.current_element_stack().iter().any(|element| { - if let Some(id) = element.attr("id") { - id.starts_with("reexport.") || 
id.starts_with("method.") - } else { - false - } - }); - - if !is_reexport { - if let Some(item) = Self::parse_item(tag) { - self.items.insert(item); - } - } - } - - StartTagOutcome::Continue - } -} - -#[cfg(test)] -mod tests { - use html_to_markdown::{TagHandler, convert_html_to_markdown}; - use indoc::indoc; - use pretty_assertions::assert_eq; - - use super::*; - - fn rustdoc_handlers() -> Vec { - vec![ - Rc::new(RefCell::new(ParagraphHandler)), - Rc::new(RefCell::new(HeadingHandler)), - Rc::new(RefCell::new(ListHandler)), - Rc::new(RefCell::new(TableHandler::new())), - Rc::new(RefCell::new(StyledTextHandler)), - Rc::new(RefCell::new(RustdocChromeRemover)), - Rc::new(RefCell::new(RustdocHeadingHandler)), - Rc::new(RefCell::new(RustdocCodeHandler)), - Rc::new(RefCell::new(RustdocItemHandler)), - ] - } - - #[test] - fn test_main_heading_buttons_get_removed() { - let html = indoc! {r##" - - "##}; - let expected = indoc! {" - # Crate serde - "} - .trim(); - - assert_eq!( - convert_html_to_markdown(html.as_bytes(), &mut rustdoc_handlers()).unwrap(), - expected - ) - } - - #[test] - fn test_single_paragraph() { - let html = indoc! {r#" -

In particular, the last point is what sets axum apart from other frameworks. - axum doesn’t have its own middleware system but instead uses - tower::Service. This means axum gets timeouts, tracing, compression, - authorization, and more, for free. It also enables you to share middleware with - applications written using hyper or tonic.

- "#}; - let expected = indoc! {" - In particular, the last point is what sets `axum` apart from other frameworks. `axum` doesn’t have its own middleware system but instead uses `tower::Service`. This means `axum` gets timeouts, tracing, compression, authorization, and more, for free. It also enables you to share middleware with applications written using `hyper` or `tonic`. - "} - .trim(); - - assert_eq!( - convert_html_to_markdown(html.as_bytes(), &mut rustdoc_handlers()).unwrap(), - expected - ) - } - - #[test] - fn test_multiple_paragraphs() { - let html = indoc! {r##" -

§Serde

-

Serde is a framework for serializing and deserializing Rust data - structures efficiently and generically.

-

The Serde ecosystem consists of data structures that know how to serialize - and deserialize themselves along with data formats that know how to - serialize and deserialize other things. Serde provides the layer by which - these two groups interact with each other, allowing any supported data - structure to be serialized and deserialized using any supported data format.

-

See the Serde website https://serde.rs/ for additional documentation and - usage examples.

-

§Design

-

Where many other languages rely on runtime reflection for serializing data, - Serde is instead built on Rust’s powerful trait system. A data structure - that knows how to serialize and deserialize itself is one that implements - Serde’s Serialize and Deserialize traits (or uses Serde’s derive - attribute to automatically generate implementations at compile time). This - avoids any overhead of reflection or runtime type information. In fact in - many situations the interaction between data structure and data format can - be completely optimized away by the Rust compiler, leaving Serde - serialization to perform the same speed as a handwritten serializer for the - specific selection of data structure and data format.

- "##}; - let expected = indoc! {" - ## Serde - - Serde is a framework for _**ser**_ializing and _**de**_serializing Rust data structures efficiently and generically. - - The Serde ecosystem consists of data structures that know how to serialize and deserialize themselves along with data formats that know how to serialize and deserialize other things. Serde provides the layer by which these two groups interact with each other, allowing any supported data structure to be serialized and deserialized using any supported data format. - - See the Serde website https://serde.rs/ for additional documentation and usage examples. - - ### Design - - Where many other languages rely on runtime reflection for serializing data, Serde is instead built on Rust’s powerful trait system. A data structure that knows how to serialize and deserialize itself is one that implements Serde’s `Serialize` and `Deserialize` traits (or uses Serde’s derive attribute to automatically generate implementations at compile time). This avoids any overhead of reflection or runtime type information. In fact in many situations the interaction between data structure and data format can be completely optimized away by the Rust compiler, leaving Serde serialization to perform the same speed as a handwritten serializer for the specific selection of data structure and data format. - "} - .trim(); - - assert_eq!( - convert_html_to_markdown(html.as_bytes(), &mut rustdoc_handlers()).unwrap(), - expected - ) - } - - #[test] - fn test_styled_text() { - let html = indoc! {r#" -

This text is bolded.

-

This text is italicized.

- "#}; - let expected = indoc! {" - This text is **bolded**. - - This text is _italicized_. - "} - .trim(); - - assert_eq!( - convert_html_to_markdown(html.as_bytes(), &mut rustdoc_handlers()).unwrap(), - expected - ) - } - - #[test] - fn test_rust_code_block() { - let html = indoc! {r#" -
use axum::extract::{Path, Query, Json};
-            use std::collections::HashMap;
-
-            // `Path` gives you the path parameters and deserializes them.
-            async fn path(Path(user_id): Path<u32>) {}
-
-            // `Query` gives you the query parameters and deserializes them.
-            async fn query(Query(params): Query<HashMap<String, String>>) {}
-
-            // Buffer the request body and deserialize it as JSON into a
-            // `serde_json::Value`. `Json` supports any type that implements
-            // `serde::Deserialize`.
-            async fn json(Json(payload): Json<serde_json::Value>) {}
- "#}; - let expected = indoc! {" - ```rs - use axum::extract::{Path, Query, Json}; - use std::collections::HashMap; - - // `Path` gives you the path parameters and deserializes them. - async fn path(Path(user_id): Path) {} - - // `Query` gives you the query parameters and deserializes them. - async fn query(Query(params): Query>) {} - - // Buffer the request body and deserialize it as JSON into a - // `serde_json::Value`. `Json` supports any type that implements - // `serde::Deserialize`. - async fn json(Json(payload): Json) {} - ``` - "} - .trim(); - - assert_eq!( - convert_html_to_markdown(html.as_bytes(), &mut rustdoc_handlers()).unwrap(), - expected - ) - } - - #[test] - fn test_toml_code_block() { - let html = indoc! {r##" -

§Required dependencies

-

To use axum there are a few dependencies you have to pull in as well:

-
[dependencies]
-            axum = "<latest-version>"
-            tokio = { version = "<latest-version>", features = ["full"] }
-            tower = "<latest-version>"
-            
- "##}; - let expected = indoc! {r#" - ## Required dependencies - - To use axum there are a few dependencies you have to pull in as well: - - ```toml - [dependencies] - axum = "" - tokio = { version = "", features = ["full"] } - tower = "" - - ``` - "#} - .trim(); - - assert_eq!( - convert_html_to_markdown(html.as_bytes(), &mut rustdoc_handlers()).unwrap(), - expected - ) - } - - #[test] - fn test_item_table() { - let html = indoc! {r##" -

Structs§

-
    -
  • Errors that can happen when using axum.
  • -
  • Extractor and response for extensions.
  • -
  • Formform
    URL encoded extractor and response.
  • -
  • Jsonjson
    JSON Extractor / Response.
  • -
  • The router type for composing handlers and services.
-

Functions§

-
    -
  • servetokio and (http1 or http2)
    Serve the service with the supplied listener.
  • -
- "##}; - let expected = indoc! {r#" - ## Structs - - - `Error`: Errors that can happen when using axum. - - `Extension`: Extractor and response for extensions. - - `Form` [`form`]: URL encoded extractor and response. - - `Json` [`json`]: JSON Extractor / Response. - - `Router`: The router type for composing handlers and services. - - ## Functions - - - `serve` [`tokio` and (`http1` or `http2`)]: Serve the service with the supplied listener. - "#} - .trim(); - - assert_eq!( - convert_html_to_markdown(html.as_bytes(), &mut rustdoc_handlers()).unwrap(), - expected - ) - } - - #[test] - fn test_table() { - let html = indoc! {r##" -

§Feature flags

-

axum uses a set of feature flags to reduce the amount of compiled and - optional dependencies.

-

The following optional features are available:

-
- - - - - - - - - - - - - -
NameDescriptionDefault?
http1Enables hyper’s http1 featureYes
http2Enables hyper’s http2 featureNo
jsonEnables the Json type and some similar convenience functionalityYes
macrosEnables optional utility macrosNo
matched-pathEnables capturing of every request’s router path and the MatchedPath extractorYes
multipartEnables parsing multipart/form-data requests with MultipartNo
original-uriEnables capturing of every request’s original URI and the OriginalUri extractorYes
tokioEnables tokio as a dependency and axum::serve, SSE and extract::connect_info types.Yes
tower-logEnables tower’s log featureYes
tracingLog rejections from built-in extractorsYes
wsEnables WebSockets support via extract::wsNo
formEnables the Form extractorYes
queryEnables the Query extractorYes
- "##}; - let expected = indoc! {r#" - ## Feature flags - - axum uses a set of feature flags to reduce the amount of compiled and optional dependencies. - - The following optional features are available: - - | Name | Description | Default? | - | --- | --- | --- | - | `http1` | Enables hyper’s `http1` feature | Yes | - | `http2` | Enables hyper’s `http2` feature | No | - | `json` | Enables the `Json` type and some similar convenience functionality | Yes | - | `macros` | Enables optional utility macros | No | - | `matched-path` | Enables capturing of every request’s router path and the `MatchedPath` extractor | Yes | - | `multipart` | Enables parsing `multipart/form-data` requests with `Multipart` | No | - | `original-uri` | Enables capturing of every request’s original URI and the `OriginalUri` extractor | Yes | - | `tokio` | Enables `tokio` as a dependency and `axum::serve`, `SSE` and `extract::connect_info` types. | Yes | - | `tower-log` | Enables `tower`’s `log` feature | Yes | - | `tracing` | Log rejections from built-in extractors | Yes | - | `ws` | Enables WebSockets support via `extract::ws` | No | - | `form` | Enables the `Form` extractor | Yes | - | `query` | Enables the `Query` extractor | Yes | - "#} - .trim(); - - assert_eq!( - convert_html_to_markdown(html.as_bytes(), &mut rustdoc_handlers()).unwrap(), - expected - ) - } -} diff --git a/crates/indexed_docs/src/registry.rs b/crates/indexed_docs/src/registry.rs deleted file mode 100644 index 6757cd9c1a1a324e43765df618e01397a8b85708..0000000000000000000000000000000000000000 --- a/crates/indexed_docs/src/registry.rs +++ /dev/null @@ -1,62 +0,0 @@ -use std::sync::Arc; - -use collections::HashMap; -use gpui::{App, BackgroundExecutor, Global, ReadGlobal, UpdateGlobal}; -use parking_lot::RwLock; - -use crate::{IndexedDocsProvider, IndexedDocsStore, ProviderId}; - -struct GlobalIndexedDocsRegistry(Arc); - -impl Global for GlobalIndexedDocsRegistry {} - -pub struct IndexedDocsRegistry { - executor: BackgroundExecutor, - stores_by_provider: RwLock>>, -} - -impl IndexedDocsRegistry { - pub fn global(cx: &App) -> Arc { - GlobalIndexedDocsRegistry::global(cx).0.clone() - } - - pub(crate) fn init_global(cx: &mut App) { - GlobalIndexedDocsRegistry::set_global( - cx, - GlobalIndexedDocsRegistry(Arc::new(Self::new(cx.background_executor().clone()))), - ); - } - - pub fn new(executor: BackgroundExecutor) -> Self { - Self { - executor, - stores_by_provider: RwLock::new(HashMap::default()), - } - } - - pub fn list_providers(&self) -> Vec { - self.stores_by_provider - .read() - .keys() - .cloned() - .collect::>() - } - - pub fn register_provider( - &self, - provider: Box, - ) { - self.stores_by_provider.write().insert( - provider.id(), - Arc::new(IndexedDocsStore::new(provider, self.executor.clone())), - ); - } - - pub fn unregister_provider(&self, provider_id: &ProviderId) { - self.stores_by_provider.write().remove(provider_id); - } - - pub fn get_provider_store(&self, provider_id: ProviderId) -> Option> { - self.stores_by_provider.read().get(&provider_id).cloned() - } -} diff --git a/crates/indexed_docs/src/store.rs b/crates/indexed_docs/src/store.rs deleted file mode 100644 index 1407078efaf122acfc07e56894d3fd803605a42a..0000000000000000000000000000000000000000 --- a/crates/indexed_docs/src/store.rs +++ /dev/null @@ -1,346 +0,0 @@ -use std::path::PathBuf; -use std::sync::Arc; -use std::sync::atomic::AtomicBool; - -use anyhow::{Context as _, Result, anyhow}; -use async_trait::async_trait; -use collections::HashMap; -use derive_more::{Deref, 
Display}; -use futures::FutureExt; -use futures::future::{self, BoxFuture, Shared}; -use fuzzy::StringMatchCandidate; -use gpui::{App, BackgroundExecutor, Task}; -use heed::Database; -use heed::types::SerdeBincode; -use parking_lot::RwLock; -use serde::{Deserialize, Serialize}; -use util::ResultExt; - -use crate::IndexedDocsRegistry; - -#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Deref, Display)] -pub struct ProviderId(pub Arc); - -/// The name of a package. -#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Deref, Display)] -pub struct PackageName(Arc); - -impl From<&str> for PackageName { - fn from(value: &str) -> Self { - Self(value.into()) - } -} - -#[async_trait] -pub trait IndexedDocsProvider { - /// Returns the ID of this provider. - fn id(&self) -> ProviderId; - - /// Returns the path to the database for this provider. - fn database_path(&self) -> PathBuf; - - /// Returns a list of packages as suggestions to be included in the search - /// results. - /// - /// This can be used to provide completions for known packages (e.g., from the - /// local project or a registry) before a package has been indexed. - async fn suggest_packages(&self) -> Result>; - - /// Indexes the package with the given name. - async fn index(&self, package: PackageName, database: Arc) -> Result<()>; -} - -/// A store for indexed docs. -pub struct IndexedDocsStore { - executor: BackgroundExecutor, - database_future: - Shared, Arc>>>, - provider: Box, - indexing_tasks_by_package: - RwLock>>>>>, - latest_errors_by_package: RwLock>>, -} - -impl IndexedDocsStore { - pub fn try_global(provider: ProviderId, cx: &App) -> Result> { - let registry = IndexedDocsRegistry::global(cx); - registry - .get_provider_store(provider.clone()) - .with_context(|| format!("no indexed docs store found for {provider}")) - } - - pub fn new( - provider: Box, - executor: BackgroundExecutor, - ) -> Self { - let database_future = executor - .spawn({ - let executor = executor.clone(); - let database_path = provider.database_path(); - async move { IndexedDocsDatabase::new(database_path, executor) } - }) - .then(|result| future::ready(result.map(Arc::new).map_err(Arc::new))) - .boxed() - .shared(); - - Self { - executor, - database_future, - provider, - indexing_tasks_by_package: RwLock::new(HashMap::default()), - latest_errors_by_package: RwLock::new(HashMap::default()), - } - } - - pub fn latest_error_for_package(&self, package: &PackageName) -> Option> { - self.latest_errors_by_package.read().get(package).cloned() - } - - /// Returns whether the package with the given name is currently being indexed. - pub fn is_indexing(&self, package: &PackageName) -> bool { - self.indexing_tasks_by_package.read().contains_key(package) - } - - pub async fn load(&self, key: String) -> Result { - self.database_future - .clone() - .await - .map_err(|err| anyhow!(err))? - .load(key) - .await - } - - pub async fn load_many_by_prefix(&self, prefix: String) -> Result> { - self.database_future - .clone() - .await - .map_err(|err| anyhow!(err))? - .load_many_by_prefix(prefix) - .await - } - - /// Returns whether any entries exist with the given prefix. - pub async fn any_with_prefix(&self, prefix: String) -> Result { - self.database_future - .clone() - .await - .map_err(|err| anyhow!(err))? 
- .any_with_prefix(prefix) - .await - } - - pub fn suggest_packages(self: Arc) -> Task>> { - let this = self.clone(); - self.executor - .spawn(async move { this.provider.suggest_packages().await }) - } - - pub fn index( - self: Arc, - package: PackageName, - ) -> Shared>>> { - if let Some(existing_task) = self.indexing_tasks_by_package.read().get(&package) { - return existing_task.clone(); - } - - let indexing_task = self - .executor - .spawn({ - let this = self.clone(); - let package = package.clone(); - async move { - let _finally = util::defer({ - let this = this.clone(); - let package = package.clone(); - move || { - this.indexing_tasks_by_package.write().remove(&package); - } - }); - - let index_task = { - let package = package.clone(); - async { - let database = this - .database_future - .clone() - .await - .map_err(|err| anyhow!(err))?; - this.provider.index(package, database).await - } - }; - - let result = index_task.await.map_err(Arc::new); - match &result { - Ok(_) => { - this.latest_errors_by_package.write().remove(&package); - } - Err(err) => { - this.latest_errors_by_package - .write() - .insert(package, err.to_string().into()); - } - } - - result - } - }) - .shared(); - - self.indexing_tasks_by_package - .write() - .insert(package, indexing_task.clone()); - - indexing_task - } - - pub fn search(&self, query: String) -> Task> { - let executor = self.executor.clone(); - let database_future = self.database_future.clone(); - self.executor.spawn(async move { - let Some(database) = database_future.await.map_err(|err| anyhow!(err)).log_err() else { - return Vec::new(); - }; - - let Some(items) = database.keys().await.log_err() else { - return Vec::new(); - }; - - let candidates = items - .iter() - .enumerate() - .map(|(ix, item_path)| StringMatchCandidate::new(ix, &item_path)) - .collect::>(); - - let matches = fuzzy::match_strings( - &candidates, - &query, - false, - true, - 100, - &AtomicBool::default(), - executor, - ) - .await; - - matches - .into_iter() - .map(|mat| items[mat.candidate_id].clone()) - .collect() - }) - } -} - -#[derive(Debug, PartialEq, Eq, Clone, Display, Serialize, Deserialize)] -pub struct MarkdownDocs(pub String); - -pub struct IndexedDocsDatabase { - executor: BackgroundExecutor, - env: heed::Env, - entries: Database, SerdeBincode>, -} - -impl IndexedDocsDatabase { - pub fn new(path: PathBuf, executor: BackgroundExecutor) -> Result { - std::fs::create_dir_all(&path)?; - - const ONE_GB_IN_BYTES: usize = 1024 * 1024 * 1024; - let env = unsafe { - heed::EnvOpenOptions::new() - .map_size(ONE_GB_IN_BYTES) - .max_dbs(1) - .open(path)? - }; - - let mut txn = env.write_txn()?; - let entries = env.create_database(&mut txn, Some("rustdoc_entries"))?; - txn.commit()?; - - Ok(Self { - executor, - env, - entries, - }) - } - - pub fn keys(&self) -> Task>> { - let env = self.env.clone(); - let entries = self.entries; - - self.executor.spawn(async move { - let txn = env.read_txn()?; - let mut iter = entries.iter(&txn)?; - let mut keys = Vec::new(); - while let Some((key, _value)) = iter.next().transpose()? { - keys.push(key); - } - - Ok(keys) - }) - } - - pub fn load(&self, key: String) -> Task> { - let env = self.env.clone(); - let entries = self.entries; - - self.executor.spawn(async move { - let txn = env.read_txn()?; - entries - .get(&txn, &key)? 
- .with_context(|| format!("no docs found for {key}")) - }) - } - - pub fn load_many_by_prefix(&self, prefix: String) -> Task>> { - let env = self.env.clone(); - let entries = self.entries; - - self.executor.spawn(async move { - let txn = env.read_txn()?; - let results = entries - .iter(&txn)? - .filter_map(|entry| { - let (key, value) = entry.ok()?; - if key.starts_with(&prefix) { - Some((key, value)) - } else { - None - } - }) - .collect::>(); - - Ok(results) - }) - } - - /// Returns whether any entries exist with the given prefix. - pub fn any_with_prefix(&self, prefix: String) -> Task> { - let env = self.env.clone(); - let entries = self.entries; - - self.executor.spawn(async move { - let txn = env.read_txn()?; - let any = entries - .iter(&txn)? - .any(|entry| entry.map_or(false, |(key, _value)| key.starts_with(&prefix))); - Ok(any) - }) - } - - pub fn insert(&self, key: String, docs: String) -> Task> { - let env = self.env.clone(); - let entries = self.entries; - - self.executor.spawn(async move { - let mut txn = env.write_txn()?; - entries.put(&mut txn, &key, &MarkdownDocs(docs))?; - txn.commit()?; - Ok(()) - }) - } -} - -impl extension::KeyValueStoreDelegate for IndexedDocsDatabase { - fn insert(&self, key: String, docs: String) -> Task> { - IndexedDocsDatabase::insert(&self, key, docs) - } -} diff --git a/crates/inspector_ui/src/div_inspector.rs b/crates/inspector_ui/src/div_inspector.rs index bd395aa01bca42ce923073ee6f80472abc7820eb..0c2b16b9f49019bff1f7860ae5cd658ae20f12b5 100644 --- a/crates/inspector_ui/src/div_inspector.rs +++ b/crates/inspector_ui/src/div_inspector.rs @@ -93,8 +93,8 @@ impl DivInspector { Ok((json_style_buffer, rust_style_buffer)) => { this.update_in(cx, |this, window, cx| { this.state = State::BuffersLoaded { - json_style_buffer: json_style_buffer, - rust_style_buffer: rust_style_buffer, + json_style_buffer, + rust_style_buffer, }; // Initialize editors immediately instead of waiting for @@ -200,8 +200,8 @@ impl DivInspector { cx.subscribe_in(&json_style_editor, window, { let id = id.clone(); let rust_style_buffer = rust_style_buffer.clone(); - move |this, editor, event: &EditorEvent, window, cx| match event { - EditorEvent::BufferEdited => { + move |this, editor, event: &EditorEvent, window, cx| { + if event == &EditorEvent::BufferEdited { let style_json = editor.read(cx).text(cx); match serde_json_lenient::from_str_lenient::(&style_json) { Ok(new_style) => { @@ -243,7 +243,6 @@ impl DivInspector { Err(err) => this.json_style_error = Some(err.to_string().into()), } } - _ => {} } }) .detach(); @@ -251,11 +250,10 @@ impl DivInspector { cx.subscribe(&rust_style_editor, { let json_style_buffer = json_style_buffer.clone(); let rust_style_buffer = rust_style_buffer.clone(); - move |this, _editor, event: &EditorEvent, cx| match event { - EditorEvent::BufferEdited => { + move |this, _editor, event: &EditorEvent, cx| { + if let EditorEvent::BufferEdited = event { this.update_json_style_from_rust(&json_style_buffer, &rust_style_buffer, cx); } - _ => {} } }) .detach(); @@ -271,23 +269,19 @@ impl DivInspector { } fn reset_style(&mut self, cx: &mut App) { - match &self.state { - State::Ready { - rust_style_buffer, - json_style_buffer, - .. - } => { - if let Err(err) = self.reset_style_editors( - &rust_style_buffer.clone(), - &json_style_buffer.clone(), - cx, - ) { - self.json_style_error = Some(format!("{err}").into()); - } else { - self.json_style_error = None; - } + if let State::Ready { + rust_style_buffer, + json_style_buffer, + .. 
+        } = &self.state
+        {
+            if let Err(err) =
+                self.reset_style_editors(&rust_style_buffer.clone(), &json_style_buffer.clone(), cx)
+            {
+                self.json_style_error = Some(format!("{err}").into());
+            } else {
+                self.json_style_error = None;
             }
-            _ => {}
         }
     }
@@ -395,11 +389,11 @@ impl DivInspector {
             .zip(self.rust_completion_replace_range.as_ref())
         {
             let before_text = snapshot
-                .text_for_range(0..completion_range.start.to_offset(&snapshot))
+                .text_for_range(0..completion_range.start.to_offset(snapshot))
                 .collect::<String>();
             let after_text = snapshot
                 .text_for_range(
-                    completion_range.end.to_offset(&snapshot)
+                    completion_range.end.to_offset(snapshot)
                         ..snapshot.clip_offset(usize::MAX, Bias::Left),
                 )
                 .collect::<String>();
@@ -702,10 +696,10 @@ impl CompletionProvider for RustStyleCompletionProvider {
 }
 
 fn completion_replace_range(snapshot: &BufferSnapshot, anchor: &Anchor) -> Option<Range<Anchor>> {
-    let point = anchor.to_point(&snapshot);
-    let offset = point.to_offset(&snapshot);
-    let line_start = Point::new(point.row, 0).to_offset(&snapshot);
-    let line_end = Point::new(point.row, snapshot.line_len(point.row)).to_offset(&snapshot);
+    let point = anchor.to_point(snapshot);
+    let offset = point.to_offset(snapshot);
+    let line_start = Point::new(point.row, 0).to_offset(snapshot);
+    let line_end = Point::new(point.row, snapshot.line_len(point.row)).to_offset(snapshot);
     let mut lines = snapshot.text_for_range(line_start..line_end).lines();
     let line = lines.next()?;
diff --git a/crates/install_cli/src/install_cli.rs b/crates/install_cli/src/install_cli.rs
index 12c094448b8362c8d638ac62da5838544b4fcc6d..dc9e0e31ab093ec2414aafb4f4885395ee3b2efc 100644
--- a/crates/install_cli/src/install_cli.rs
+++ b/crates/install_cli/src/install_cli.rs
@@ -105,7 +105,7 @@ pub fn install_cli(window: &mut Window, cx: &mut Context) {
                 cx,
             )
         })?;
-        register_zed_scheme(&cx).await.log_err();
+        register_zed_scheme(cx).await.log_err();
         Ok(())
     })
     .detach_and_prompt_err("Error installing zed cli", window, cx, |_, _, _| None);
diff --git a/crates/jj/src/jj_repository.rs b/crates/jj/src/jj_repository.rs
index 93ae79eb90992a8fc71804788325683eae800cb4..afbe54c99dcb40a039e8f7cc87c14dc393ebac3a 100644
--- a/crates/jj/src/jj_repository.rs
+++ b/crates/jj/src/jj_repository.rs
@@ -50,16 +50,13 @@ impl RealJujutsuRepository {
 
 impl JujutsuRepository for RealJujutsuRepository {
     fn list_bookmarks(&self) -> Vec<Bookmark> {
-        let bookmarks = self
-            .repository
+        self.repository
             .view()
             .bookmarks()
             .map(|(ref_name, _target)| Bookmark {
                 ref_name: ref_name.as_str().to_string().into(),
             })
-            .collect();
-
-        bookmarks
+            .collect()
     }
 }
diff --git a/crates/jj/src/jj_store.rs b/crates/jj/src/jj_store.rs
index a10f06fad48a3867ce6e19ffb5fc721c931ae6e4..2d2d958d7f964cdfc7723827fb2241e50d172697 100644
--- a/crates/jj/src/jj_store.rs
+++ b/crates/jj/src/jj_store.rs
@@ -16,7 +16,7 @@ pub struct JujutsuStore {
 
 impl JujutsuStore {
     pub fn init_global(cx: &mut App) {
-        let Some(repository) = RealJujutsuRepository::new(&Path::new(".")).ok() else {
+        let Some(repository) = RealJujutsuRepository::new(Path::new(".")).ok() else {
             return;
         };
diff --git a/crates/journal/src/journal.rs b/crates/journal/src/journal.rs
index 0335a746cd23eb2654dac7f8960a649aa3c269ff..c09ab6f764893589945f2c3cc00d71df84b8f77a 100644
--- a/crates/journal/src/journal.rs
+++ b/crates/journal/src/journal.rs
@@ -123,7 +123,7 @@ pub fn new_journal_entry(workspace: &Workspace, window: &mut Window, cx: &mut Ap
     }
 
     let app_state = workspace.app_state().clone();
-    let view_snapshot = workspace.weak_handle().clone();
+    let view_snapshot =
workspace.weak_handle(); window .spawn(cx, async move |cx| { @@ -170,23 +170,23 @@ pub fn new_journal_entry(workspace: &Workspace, window: &mut Window, cx: &mut Ap .await }; - if let Some(Some(Ok(item))) = opened.first() { - if let Some(editor) = item.downcast::().map(|editor| editor.downgrade()) { - editor.update_in(cx, |editor, window, cx| { - let len = editor.buffer().read(cx).len(cx); - editor.change_selections( - SelectionEffects::scroll(Autoscroll::center()), - window, - cx, - |s| s.select_ranges([len..len]), - ); - if len > 0 { - editor.insert("\n\n", window, cx); - } - editor.insert(&entry_heading, window, cx); + if let Some(Some(Ok(item))) = opened.first() + && let Some(editor) = item.downcast::().map(|editor| editor.downgrade()) + { + editor.update_in(cx, |editor, window, cx| { + let len = editor.buffer().read(cx).len(cx); + editor.change_selections( + SelectionEffects::scroll(Autoscroll::center()), + window, + cx, + |s| s.select_ranges([len..len]), + ); + if len > 0 { editor.insert("\n\n", window, cx); - })?; - } + } + editor.insert(&entry_heading, window, cx); + editor.insert("\n\n", window, cx); + })?; } anyhow::Ok(()) @@ -195,11 +195,9 @@ pub fn new_journal_entry(workspace: &Workspace, window: &mut Window, cx: &mut Ap } fn journal_dir(path: &str) -> Option { - let expanded_journal_dir = shellexpand::full(path) //TODO handle this better + shellexpand::full(path) //TODO handle this better .ok() - .map(|dir| Path::new(&dir.to_string()).to_path_buf().join("journal")); - - expanded_journal_dir + .map(|dir| Path::new(&dir.to_string()).to_path_buf().join("journal")) } fn heading_entry(now: NaiveTime, hour_format: &Option) -> String { diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 83517accc239ecf9d2196f124fc5695a8545ef17..b106110c33d12a54cb7c4731598306c5da14abe9 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -716,7 +716,7 @@ impl EditPreview { &self.applied_edits_snapshot, &self.syntax_snapshot, None, - &syntax_theme, + syntax_theme, ); } @@ -727,7 +727,7 @@ impl EditPreview { ¤t_snapshot.text, ¤t_snapshot.syntax, Some(deletion_highlight_style), - &syntax_theme, + syntax_theme, ); } @@ -737,7 +737,7 @@ impl EditPreview { &self.applied_edits_snapshot, &self.syntax_snapshot, Some(insertion_highlight_style), - &syntax_theme, + syntax_theme, ); } @@ -749,7 +749,7 @@ impl EditPreview { &self.applied_edits_snapshot, &self.syntax_snapshot, None, - &syntax_theme, + syntax_theme, ); highlighted_text.build() @@ -974,8 +974,6 @@ impl Buffer { TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot(); let mut syntax = SyntaxMap::new(&text).snapshot(); if let Some(language) = language.clone() { - let text = text.clone(); - let language = language.clone(); let language_registry = language_registry.clone(); syntax.reparse(&text, language_registry, language); } @@ -1020,9 +1018,6 @@ impl Buffer { let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot(); let mut syntax = SyntaxMap::new(&text).snapshot(); if let Some(language) = language.clone() { - let text = text.clone(); - let language = language.clone(); - let language_registry = language_registry.clone(); syntax.reparse(&text, language_registry, language); } BufferSnapshot { @@ -1128,7 +1123,7 @@ impl Buffer { } else { ranges.as_slice() } - .into_iter() + .iter() .peekable(); let mut edits = Vec::new(); @@ -1158,13 +1153,12 @@ impl Buffer { base_buffer.edit(edits, None, cx) }); - if let Some(operation) = operation { - if let 
Some(BufferBranchState { + if let Some(operation) = operation + && let Some(BufferBranchState { merged_operations, .. }) = &mut self.branch_state - { - merged_operations.push(operation); - } + { + merged_operations.push(operation); } } @@ -1185,11 +1179,11 @@ impl Buffer { }; let mut operation_to_undo = None; - if let Operation::Buffer(text::Operation::Edit(operation)) = &operation { - if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) { - merged_operations.remove(ix); - operation_to_undo = Some(operation.timestamp); - } + if let Operation::Buffer(text::Operation::Edit(operation)) = &operation + && let Ok(ix) = merged_operations.binary_search(&operation.timestamp) + { + merged_operations.remove(ix); + operation_to_undo = Some(operation.timestamp); } self.apply_ops([operation.clone()], cx); @@ -1396,7 +1390,8 @@ impl Buffer { is_first = false; return true; } - let any_sub_ranges_contain_range = layer + + layer .included_sub_ranges .map(|sub_ranges| { sub_ranges.iter().any(|sub_range| { @@ -1405,9 +1400,7 @@ impl Buffer { !is_before_start && !is_after_end }) }) - .unwrap_or(true); - let result = any_sub_ranges_contain_range; - return result; + .unwrap_or(true) }) .last() .map(|info| info.language.clone()) @@ -1424,10 +1417,10 @@ impl Buffer { .map(|info| info.language.clone()) .collect(); - if languages.is_empty() { - if let Some(buffer_language) = self.language() { - languages.push(buffer_language.clone()); - } + if languages.is_empty() + && let Some(buffer_language) = self.language() + { + languages.push(buffer_language.clone()); } languages @@ -1521,12 +1514,12 @@ impl Buffer { let new_syntax_map = parse_task.await; this.update(cx, move |this, cx| { let grammar_changed = - this.language.as_ref().map_or(true, |current_language| { + this.language.as_ref().is_none_or(|current_language| { !Arc::ptr_eq(&language, current_language) }); let language_registry_changed = new_syntax_map .contains_unknown_injections() - && language_registry.map_or(false, |registry| { + && language_registry.is_some_and(|registry| { registry.version() != new_syntax_map.language_registry_version() }); let parse_again = language_registry_changed @@ -1571,6 +1564,7 @@ impl Buffer { diagnostics: diagnostics.iter().cloned().collect(), lamport_timestamp, }; + self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx); self.send_operation(op, true, cx); } @@ -1719,8 +1713,7 @@ impl Buffer { }) .with_delta(suggestion.delta, language_indent_size); - if old_suggestions.get(&new_row).map_or( - true, + if old_suggestions.get(&new_row).is_none_or( |(old_indentation, was_within_error)| { suggested_indent != *old_indentation && (!suggestion.within_error || *was_within_error) @@ -2014,7 +2007,7 @@ impl Buffer { fn was_changed(&mut self) { self.change_bits.retain(|change_bit| { - change_bit.upgrade().map_or(false, |bit| { + change_bit.upgrade().is_some_and(|bit| { bit.replace(true); true }) @@ -2191,7 +2184,7 @@ impl Buffer { if self .remote_selections .get(&self.text.replica_id()) - .map_or(true, |set| !set.selections.is_empty()) + .is_none_or(|set| !set.selections.is_empty()) { self.set_active_selections(Arc::default(), false, Default::default(), cx); } @@ -2208,7 +2201,7 @@ impl Buffer { self.remote_selections.insert( AGENT_REPLICA_ID, SelectionSet { - selections: selections.clone(), + selections, lamport_timestamp, line_mode, cursor_shape, @@ -2270,13 +2263,11 @@ impl Buffer { } let new_text = new_text.into(); if !new_text.is_empty() || !range.is_empty() { - if let Some((prev_range, prev_text)) = 
edits.last_mut() { - if prev_range.end >= range.start { - prev_range.end = cmp::max(prev_range.end, range.end); - *prev_text = format!("{prev_text}{new_text}").into(); - } else { - edits.push((range, new_text)); - } + if let Some((prev_range, prev_text)) = edits.last_mut() + && prev_range.end >= range.start + { + prev_range.end = cmp::max(prev_range.end, range.end); + *prev_text = format!("{prev_text}{new_text}").into(); } else { edits.push((range, new_text)); } @@ -2296,10 +2287,27 @@ impl Buffer { if let Some((before_edit, mode)) = autoindent_request { let mut delta = 0isize; - let entries = edits + let mut previous_setting = None; + let entries: Vec<_> = edits .into_iter() .enumerate() .zip(&edit_operation.as_edit().unwrap().new_text) + .filter(|((_, (range, _)), _)| { + let language = before_edit.language_at(range.start); + let language_id = language.map(|l| l.id()); + if let Some((cached_language_id, auto_indent)) = previous_setting + && cached_language_id == language_id + { + auto_indent + } else { + // The auto-indent setting is not present in editorconfigs, hence + // we can avoid passing the file here. + let auto_indent = + language_settings(language.map(|l| l.name()), None, cx).auto_indent; + previous_setting = Some((language_id, auto_indent)); + auto_indent + } + }) .map(|((ix, (range, _)), new_text)| { let new_text_length = new_text.len(); let old_start = range.start.to_point(&before_edit); @@ -2373,12 +2381,14 @@ impl Buffer { }) .collect(); - self.autoindent_requests.push(Arc::new(AutoindentRequest { - before_edit, - entries, - is_block_mode: matches!(mode, AutoindentMode::Block { .. }), - ignore_empty_lines: false, - })); + if !entries.is_empty() { + self.autoindent_requests.push(Arc::new(AutoindentRequest { + before_edit, + entries, + is_block_mode: matches!(mode, AutoindentMode::Block { .. 
}), + ignore_empty_lines: false, + })); + } } self.end_transaction(cx); @@ -2571,10 +2581,10 @@ impl Buffer { line_mode, cursor_shape, } => { - if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) { - if set.lamport_timestamp > lamport_timestamp { - return; - } + if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) + && set.lamport_timestamp > lamport_timestamp + { + return; } self.remote_selections.insert( @@ -2600,7 +2610,7 @@ impl Buffer { self.completion_triggers = self .completion_triggers_per_language_server .values() - .flat_map(|triggers| triggers.into_iter().cloned()) + .flat_map(|triggers| triggers.iter().cloned()) .collect(); } else { self.completion_triggers_per_language_server @@ -2760,7 +2770,7 @@ impl Buffer { self.completion_triggers = self .completion_triggers_per_language_server .values() - .flat_map(|triggers| triggers.into_iter().cloned()) + .flat_map(|triggers| triggers.iter().cloned()) .collect(); } else { self.completion_triggers_per_language_server @@ -2822,7 +2832,7 @@ impl Buffer { let mut edits: Vec<(Range, String)> = Vec::new(); let mut last_end = None; for _ in 0..old_range_count { - if last_end.map_or(false, |last_end| last_end >= self.len()) { + if last_end.is_some_and(|last_end| last_end >= self.len()) { break; } @@ -2991,9 +3001,9 @@ impl BufferSnapshot { } let mut error_ranges = Vec::>::new(); - let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| { - grammar.error_query.as_ref() - }); + let mut matches = self + .syntax + .matches(range, &self.text, |grammar| grammar.error_query.as_ref()); while let Some(mat) = matches.peek() { let node = mat.captures[0].node; let start = Point::from_ts_point(node.start_position()); @@ -3042,14 +3052,14 @@ impl BufferSnapshot { if config .decrease_indent_pattern .as_ref() - .map_or(false, |regex| regex.is_match(line)) + .is_some_and(|regex| regex.is_match(line)) { indent_change_rows.push((row, Ordering::Less)); } if config .increase_indent_pattern .as_ref() - .map_or(false, |regex| regex.is_match(line)) + .is_some_and(|regex| regex.is_match(line)) { indent_change_rows.push((row + 1, Ordering::Greater)); } @@ -3065,7 +3075,7 @@ impl BufferSnapshot { } } for rule in &config.decrease_indent_patterns { - if rule.pattern.as_ref().map_or(false, |r| r.is_match(line)) { + if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) { let row_start_column = self.indent_size_for_line(row).len; let basis_row = rule .valid_after @@ -3278,8 +3288,7 @@ impl BufferSnapshot { range: Range, ) -> Option> { let range = range.to_offset(self); - return self - .syntax + self.syntax .layers_for_range(range, &self.text, false) .max_by(|a, b| { if a.depth != b.depth { @@ -3289,7 +3298,7 @@ impl BufferSnapshot { } else { a.node().end_byte().cmp(&b.node().end_byte()).reverse() } - }); + }) } /// Returns the main [`Language`]. 
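The buffer.rs hunks above repeatedly replace `Option::map_or(true, ..)` / `map_or(false, ..)` with `is_none_or` / `is_some_and`, and flatten nested `if let` blocks into let-chains. As a rough standalone sketch of both idioms (not part of the patch; `lookup`, `cache`, and `limit` are invented names, and the let-chain form assumes a toolchain where let-chains are stable, e.g. the Rust 2024 edition):

// Illustrative sketch only; the names here are invented, not taken from the patch.
fn under_limit(lookup: Option<usize>, limit: usize) -> bool {
    // Same behavior as `lookup.map_or(true, |len| len < limit)`.
    lookup.is_none_or(|len| len < limit)
}

fn main() {
    let cache: Option<Vec<u8>> = Some(vec![1, 2, 3]);

    // Same behavior as `cache.as_ref().map_or(false, |buf| !buf.is_empty())`.
    if cache.as_ref().is_some_and(|buf| !buf.is_empty()) {
        println!("cache is populated");
    }

    // Let-chain form of the nested `if let` + condition pattern flattened throughout these hunks.
    if let Some(buf) = cache.as_ref()
        && buf.len() < 8
    {
        println!(
            "small cache: {} bytes, under_limit: {}",
            buf.len(),
            under_limit(Some(buf.len()), 8)
        );
    }
}
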
@@ -3347,9 +3356,8 @@ impl BufferSnapshot { } } - if let Some(range) = range { - if smallest_range_and_depth.as_ref().map_or( - true, + if let Some(range) = range + && smallest_range_and_depth.as_ref().is_none_or( |(smallest_range, smallest_range_depth)| { if layer.depth > *smallest_range_depth { true @@ -3359,13 +3367,13 @@ impl BufferSnapshot { false } }, - ) { - smallest_range_and_depth = Some((range, layer.depth)); - scope = Some(LanguageScope { - language: layer.language.clone(), - override_id: layer.override_id(offset, &self.text), - }); - } + ) + { + smallest_range_and_depth = Some((range, layer.depth)); + scope = Some(LanguageScope { + language: layer.language.clone(), + override_id: layer.override_id(offset, &self.text), + }); } } @@ -3481,17 +3489,17 @@ impl BufferSnapshot { // If there is a candidate node on both sides of the (empty) range, then // decide between the two by favoring a named node over an anonymous token. // If both nodes are the same in that regard, favor the right one. - if let Some(right_node) = right_node { - if right_node.is_named() || !left_node.is_named() { - layer_result = right_node; - } + if let Some(right_node) = right_node + && (right_node.is_named() || !left_node.is_named()) + { + layer_result = right_node; } } - if let Some(previous_result) = &result { - if previous_result.byte_range().len() < layer_result.byte_range().len() { - continue; - } + if let Some(previous_result) = &result + && previous_result.byte_range().len() < layer_result.byte_range().len() + { + continue; } result = Some(layer_result); } @@ -3526,7 +3534,7 @@ impl BufferSnapshot { } } - return Some(cursor.node()); + Some(cursor.node()) } /// Returns the outline for the buffer. @@ -3555,7 +3563,7 @@ impl BufferSnapshot { )?; let mut prev_depth = None; items.retain(|item| { - let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth); + let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth); prev_depth = Some(item.depth); result }); @@ -4062,11 +4070,11 @@ impl BufferSnapshot { // Get the ranges of the innermost pair of brackets. 
let mut result: Option<(Range, Range)> = None; - for pair in self.enclosing_bracket_ranges(range.clone()) { - if let Some(range_filter) = range_filter { - if !range_filter(pair.open_range.clone(), pair.close_range.clone()) { - continue; - } + for pair in self.enclosing_bracket_ranges(range) { + if let Some(range_filter) = range_filter + && !range_filter(pair.open_range.clone(), pair.close_range.clone()) + { + continue; } let len = pair.close_range.end - pair.open_range.start; @@ -4235,7 +4243,7 @@ impl BufferSnapshot { .map(|(range, name)| { ( name.to_string(), - self.text_for_range(range.clone()).collect::(), + self.text_for_range(range).collect::(), ) }) .collect(); @@ -4432,7 +4440,7 @@ impl BufferSnapshot { pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap> { let query_str = query.fuzzy_contents; - if query_str.map_or(false, |query| query.is_empty()) { + if query_str.is_some_and(|query| query.is_empty()) { return BTreeMap::default(); } @@ -4456,27 +4464,26 @@ impl BufferSnapshot { current_word_start_ix = Some(ix); } - if let Some(query_chars) = &query_chars { - if query_ix < query_len { - if c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) { - query_ix += 1; - } - } + if let Some(query_chars) = &query_chars + && query_ix < query_len + && c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) + { + query_ix += 1; } continue; - } else if let Some(word_start) = current_word_start_ix.take() { - if query_ix == query_len { - let word_range = self.anchor_before(word_start)..self.anchor_after(ix); - let mut word_text = self.text_for_range(word_start..ix).peekable(); - let first_char = word_text - .peek() - .and_then(|first_chunk| first_chunk.chars().next()); - // Skip empty and "words" starting with digits as a heuristic to reduce useless completions - if !query.skip_digits - || first_char.map_or(true, |first_char| !first_char.is_digit(10)) - { - words.insert(word_text.collect(), word_range); - } + } else if let Some(word_start) = current_word_start_ix.take() + && query_ix == query_len + { + let word_range = self.anchor_before(word_start)..self.anchor_after(ix); + let mut word_text = self.text_for_range(word_start..ix).peekable(); + let first_char = word_text + .peek() + .and_then(|first_chunk| first_chunk.chars().next()); + // Skip empty and "words" starting with digits as a heuristic to reduce useless completions + if !query.skip_digits + || first_char.is_none_or(|first_char| !first_char.is_digit(10)) + { + words.insert(word_text.collect(), word_range); } } query_ix = 0; @@ -4589,17 +4596,17 @@ impl<'a> BufferChunks<'a> { highlights .stack .retain(|(end_offset, _)| *end_offset > range.start); - if let Some(capture) = &highlights.next_capture { - if range.start >= capture.node.start_byte() { - let next_capture_end = capture.node.end_byte(); - if range.start < next_capture_end { - highlights.stack.push(( - next_capture_end, - highlights.highlight_maps[capture.grammar_index].get(capture.index), - )); - } - highlights.next_capture.take(); + if let Some(capture) = &highlights.next_capture + && range.start >= capture.node.start_byte() + { + let next_capture_end = capture.node.end_byte(); + if range.start < next_capture_end { + highlights.stack.push(( + next_capture_end, + highlights.highlight_maps[capture.grammar_index].get(capture.index), + )); } + highlights.next_capture.take(); } } else if let Some(snapshot) = self.buffer_snapshot { let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone()); @@ -4624,33 +4631,33 @@ impl<'a> BufferChunks<'a> { } fn 
initialize_diagnostic_endpoints(&mut self) { - if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() { - if let Some(buffer) = self.buffer_snapshot { - let mut diagnostic_endpoints = Vec::new(); - for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) { - diagnostic_endpoints.push(DiagnosticEndpoint { - offset: entry.range.start, - is_start: true, - severity: entry.diagnostic.severity, - is_unnecessary: entry.diagnostic.is_unnecessary, - underline: entry.diagnostic.underline, - }); - diagnostic_endpoints.push(DiagnosticEndpoint { - offset: entry.range.end, - is_start: false, - severity: entry.diagnostic.severity, - is_unnecessary: entry.diagnostic.is_unnecessary, - underline: entry.diagnostic.underline, - }); - } - diagnostic_endpoints - .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start)); - *diagnostics = diagnostic_endpoints.into_iter().peekable(); - self.hint_depth = 0; - self.error_depth = 0; - self.warning_depth = 0; - self.information_depth = 0; + if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() + && let Some(buffer) = self.buffer_snapshot + { + let mut diagnostic_endpoints = Vec::new(); + for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) { + diagnostic_endpoints.push(DiagnosticEndpoint { + offset: entry.range.start, + is_start: true, + severity: entry.diagnostic.severity, + is_unnecessary: entry.diagnostic.is_unnecessary, + underline: entry.diagnostic.underline, + }); + diagnostic_endpoints.push(DiagnosticEndpoint { + offset: entry.range.end, + is_start: false, + severity: entry.diagnostic.severity, + is_unnecessary: entry.diagnostic.is_unnecessary, + underline: entry.diagnostic.underline, + }); } + diagnostic_endpoints + .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start)); + *diagnostics = diagnostic_endpoints.into_iter().peekable(); + self.hint_depth = 0; + self.error_depth = 0; + self.warning_depth = 0; + self.information_depth = 0; } } @@ -4761,11 +4768,11 @@ impl<'a> Iterator for BufferChunks<'a> { .min(next_capture_start) .min(next_diagnostic_endpoint); let mut highlight_id = None; - if let Some(highlights) = self.highlights.as_ref() { - if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() { - chunk_end = chunk_end.min(*parent_capture_end); - highlight_id = Some(*parent_highlight_id); - } + if let Some(highlights) = self.highlights.as_ref() + && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() + { + chunk_end = chunk_end.min(*parent_capture_end); + highlight_id = Some(*parent_highlight_id); } let slice = @@ -4959,11 +4966,12 @@ pub(crate) fn contiguous_ranges( std::iter::from_fn(move || { loop { if let Some(value) = values.next() { - if let Some(range) = &mut current_range { - if value == range.end && range.len() < max_len { - range.end += 1; - continue; - } + if let Some(range) = &mut current_range + && value == range.end + && range.len() < max_len + { + range.end += 1; + continue; } let prev_range = current_range.clone(); @@ -5031,10 +5039,10 @@ impl CharClassifier { } else { scope.word_characters() }; - if let Some(characters) = characters { - if characters.contains(&c) { - return CharKind::Word; - } + if let Some(characters) = characters + && characters.contains(&c) + { + return CharKind::Word; } } diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 2e2df7e658596daaca3b338ef830794fd0d3bef8..ce65afa6288767766fa9a1da5c3a24f9ca86e580 100644 --- 
a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -1744,7 +1744,7 @@ fn test_autoindent_block_mode(cx: &mut App) { buffer.edit( [(Point::new(2, 8)..Point::new(2, 8), inserted_text)], Some(AutoindentMode::Block { - original_indent_columns: original_indent_columns.clone(), + original_indent_columns, }), cx, ); @@ -1790,9 +1790,9 @@ fn test_autoindent_block_mode_with_newline(cx: &mut App) { "# .unindent(); buffer.edit( - [(Point::new(2, 0)..Point::new(2, 0), inserted_text.clone())], + [(Point::new(2, 0)..Point::new(2, 0), inserted_text)], Some(AutoindentMode::Block { - original_indent_columns: original_indent_columns.clone(), + original_indent_columns, }), cx, ); @@ -1843,7 +1843,7 @@ fn test_autoindent_block_mode_without_original_indent_columns(cx: &mut App) { buffer.edit( [(Point::new(2, 0)..Point::new(2, 0), inserted_text)], Some(AutoindentMode::Block { - original_indent_columns: original_indent_columns.clone(), + original_indent_columns, }), cx, ); @@ -2030,7 +2030,7 @@ fn test_autoindent_with_injected_languages(cx: &mut App) { let language_registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone())); language_registry.add(html_language.clone()); - language_registry.add(javascript_language.clone()); + language_registry.add(javascript_language); cx.new(|cx| { let (text, ranges) = marked_text_ranges( diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index b9933dfcec36f1e8c5cb31271668a25b60020c8a..7ae77c9141d35363975f07b91b45f032da62d21f 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -44,6 +44,7 @@ use serde::{Deserialize, Deserializer, Serialize, Serializer, de}; use serde_json::Value; use settings::WorktreeId; use smol::future::FutureExt as _; +use std::num::NonZeroU32; use std::{ any::Any, ffi::OsStr, @@ -59,7 +60,6 @@ use std::{ atomic::{AtomicU64, AtomicUsize, Ordering::SeqCst}, }, }; -use std::{num::NonZeroU32, sync::OnceLock}; use syntax_map::{QueryCursorHandle, SyntaxSnapshot}; use task::RunnableTag; pub use task_context::{ContextLocation, ContextProvider, RunnableRange}; @@ -67,7 +67,9 @@ pub use text_diff::{ DiffOptions, apply_diff_patch, line_diff, text_diff, text_diff_with_options, unified_diff, }; use theme::SyntaxTheme; -pub use toolchain::{LanguageToolchainStore, Toolchain, ToolchainList, ToolchainLister}; +pub use toolchain::{ + LanguageToolchainStore, LocalLanguageToolchainStore, Toolchain, ToolchainList, ToolchainLister, +}; use tree_sitter::{self, Query, QueryCursor, WasmStore, wasmtime}; use util::serde::default_true; @@ -119,8 +121,8 @@ where func(cursor.deref_mut()) } -static NEXT_LANGUAGE_ID: LazyLock = LazyLock::new(Default::default); -static NEXT_GRAMMAR_ID: LazyLock = LazyLock::new(Default::default); +static NEXT_LANGUAGE_ID: AtomicUsize = AtomicUsize::new(0); +static NEXT_GRAMMAR_ID: AtomicUsize = AtomicUsize::new(0); static WASM_ENGINE: LazyLock = LazyLock::new(|| { wasmtime::Engine::new(&wasmtime::Config::new()).expect("Failed to create Wasmtime engine") }); @@ -165,7 +167,6 @@ pub struct CachedLspAdapter { pub adapter: Arc, pub reinstall_attempt_count: AtomicU64, cached_binary: futures::lock::Mutex>, - manifest_name: OnceLock>, } impl Debug for CachedLspAdapter { @@ -201,18 +202,17 @@ impl CachedLspAdapter { adapter, cached_binary: Default::default(), reinstall_attempt_count: AtomicU64::new(0), - manifest_name: Default::default(), }) } pub fn name(&self) -> LanguageServerName { - self.adapter.name().clone() + self.adapter.name() } pub async fn 
get_language_server_command( self: Arc, delegate: Arc, - toolchains: Arc, + toolchains: Option, binary_options: LanguageServerBinaryOptions, cx: &mut AsyncApp, ) -> Result { @@ -281,21 +281,6 @@ impl CachedLspAdapter { .cloned() .unwrap_or_else(|| language_name.lsp_id()) } - - pub fn manifest_name(&self) -> Option { - self.manifest_name - .get_or_init(|| self.adapter.manifest_name()) - .clone() - } -} - -/// Determines what gets sent out as a workspace folders content -#[derive(Clone, Copy, Debug, PartialEq)] -pub enum WorkspaceFoldersContent { - /// Send out a single entry with the root of the workspace. - WorktreeRoot, - /// Send out a list of subproject roots. - SubprojectRoots, } /// [`LspAdapterDelegate`] allows [`LspAdapter]` implementations to interface with the application @@ -327,7 +312,7 @@ pub trait LspAdapter: 'static + Send + Sync { fn get_language_server_command<'a>( self: Arc, delegate: Arc, - toolchains: Arc, + toolchains: Option, binary_options: LanguageServerBinaryOptions, mut cached_binary: futures::lock::MutexGuard<'a, Option>, cx: &'a mut AsyncApp, @@ -344,9 +329,9 @@ pub trait LspAdapter: 'static + Send + Sync { // We only want to cache when we fall back to the global one, // because we don't want to download and overwrite our global one // for each worktree we might have open. - if binary_options.allow_path_lookup { - if let Some(binary) = self.check_if_user_installed(delegate.as_ref(), toolchains, cx).await { - log::info!( + if binary_options.allow_path_lookup + && let Some(binary) = self.check_if_user_installed(delegate.as_ref(), toolchains, cx).await { + log::debug!( "found user-installed language server for {}. path: {:?}, arguments: {:?}", self.name().0, binary.path, @@ -354,7 +339,6 @@ pub trait LspAdapter: 'static + Send + Sync { ); return Ok(binary); } - } anyhow::ensure!(binary_options.allow_binary_download, "downloading language servers disabled"); @@ -402,7 +386,7 @@ pub trait LspAdapter: 'static + Send + Sync { async fn check_if_user_installed( &self, _: &dyn LspAdapterDelegate, - _: Arc, + _: Option, _: &AsyncApp, ) -> Option { None @@ -535,7 +519,7 @@ pub trait LspAdapter: 'static + Send + Sync { self: Arc, _: &dyn Fs, _: &Arc, - _: Arc, + _: Option, _cx: &mut AsyncApp, ) -> Result { Ok(serde_json::json!({})) @@ -555,7 +539,6 @@ pub trait LspAdapter: 'static + Send + Sync { _target_language_server_id: LanguageServerName, _: &dyn Fs, _: &Arc, - _: Arc, _cx: &mut AsyncApp, ) -> Result> { Ok(None) @@ -587,17 +570,6 @@ pub trait LspAdapter: 'static + Send + Sync { Ok(original) } - /// Determines whether a language server supports workspace folders. - /// - /// And does not trip over itself in the process. - fn workspace_folders_content(&self) -> WorkspaceFoldersContent { - WorkspaceFoldersContent::SubprojectRoots - } - - fn manifest_name(&self) -> Option { - None - } - /// Method only implemented by the default JSON language server adapter. 
/// Used to provide dynamic reloading of the JSON schemas used to /// provide autocompletion and diagnostics in Zed setting and keybind @@ -629,7 +601,7 @@ async fn try_fetch_server_binary } let name = adapter.name(); - log::info!("fetching latest version of language server {:?}", name.0); + log::debug!("fetching latest version of language server {:?}", name.0); delegate.update_status(name.clone(), BinaryStatus::CheckingForUpdate); let latest_version = adapter @@ -640,7 +612,7 @@ async fn try_fetch_server_binary .check_if_version_installed(latest_version.as_ref(), &container_dir, delegate.as_ref()) .await { - log::info!("language server {:?} is already installed", name.0); + log::debug!("language server {:?} is already installed", name.0); delegate.update_status(name.clone(), BinaryStatus::None); Ok(binary) } else { @@ -991,11 +963,11 @@ where fn deserialize_regex_vec<'de, D: Deserializer<'de>>(d: D) -> Result, D::Error> { let sources = Vec::::deserialize(d)?; - let mut regexes = Vec::new(); - for source in sources { - regexes.push(regex::Regex::new(&source).map_err(de::Error::custom)?); - } - Ok(regexes) + sources + .into_iter() + .map(|source| regex::Regex::new(&source)) + .collect::>() + .map_err(de::Error::custom) } fn regex_vec_json_schema(_: &mut SchemaGenerator) -> schemars::Schema { @@ -1061,12 +1033,10 @@ impl<'de> Deserialize<'de> for BracketPairConfig { D: Deserializer<'de>, { let result = Vec::::deserialize(deserializer)?; - let mut brackets = Vec::with_capacity(result.len()); - let mut disabled_scopes_by_bracket_ix = Vec::with_capacity(result.len()); - for entry in result { - brackets.push(entry.bracket_pair); - disabled_scopes_by_bracket_ix.push(entry.not_in); - } + let (brackets, disabled_scopes_by_bracket_ix) = result + .into_iter() + .map(|entry| (entry.bracket_pair, entry.not_in)) + .unzip(); Ok(BracketPairConfig { pairs: brackets, @@ -1108,6 +1078,7 @@ pub struct Language { pub(crate) grammar: Option>, pub(crate) context_provider: Option>, pub(crate) toolchain: Option>, + pub(crate) manifest_name: Option, } #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] @@ -1318,6 +1289,7 @@ impl Language { }), context_provider: None, toolchain: None, + manifest_name: None, } } @@ -1331,6 +1303,10 @@ impl Language { self } + pub fn with_manifest(mut self, name: Option) -> Self { + self.manifest_name = name; + self + } pub fn with_queries(mut self, queries: LanguageQueries) -> Result { if let Some(query) = queries.highlights { self = self @@ -1400,16 +1376,14 @@ impl Language { let grammar = self.grammar_mut().context("cannot mutate grammar")?; let query = Query::new(&grammar.ts_language, source)?; - let mut extra_captures = Vec::with_capacity(query.capture_names().len()); - - for name in query.capture_names().iter() { - let kind = if *name == "run" { - RunnableCapture::Run - } else { - RunnableCapture::Named(name.to_string().into()) - }; - extra_captures.push(kind); - } + let extra_captures: Vec<_> = query + .capture_names() + .iter() + .map(|&name| match name { + "run" => RunnableCapture::Run, + name => RunnableCapture::Named(name.to_string().into()), + }) + .collect(); grammar.runnable_config = Some(RunnableConfig { extra_captures, @@ -1539,9 +1513,8 @@ impl Language { .map(|ix| { let mut config = BracketsPatternConfig::default(); for setting in query.property_settings(ix) { - match setting.key.as_ref() { - "newline.only" => config.newline_only = true, - _ => {} + if setting.key.as_ref() == "newline.only" { + config.newline_only = true } } config @@ -1764,6 +1737,9 
@@ impl Language { pub fn name(&self) -> LanguageName { self.config.name.clone() } + pub fn manifest(&self) -> Option<&ManifestName> { + self.manifest_name.as_ref() + } pub fn code_fence_block_name(&self) -> Arc { self.config @@ -1798,10 +1774,10 @@ impl Language { BufferChunks::new(text, range, Some((captures, highlight_maps)), false, None) { let end_offset = offset + chunk.text.len(); - if let Some(highlight_id) = chunk.syntax_highlight_id { - if !highlight_id.is_default() { - result.push((offset..end_offset, highlight_id)); - } + if let Some(highlight_id) = chunk.syntax_highlight_id + && !highlight_id.is_default() + { + result.push((offset..end_offset, highlight_id)); } offset = end_offset; } @@ -1818,11 +1794,11 @@ impl Language { } pub fn set_theme(&self, theme: &SyntaxTheme) { - if let Some(grammar) = self.grammar.as_ref() { - if let Some(highlights_query) = &grammar.highlights_query { - *grammar.highlight_map.lock() = - HighlightMap::new(highlights_query.capture_names(), theme); - } + if let Some(grammar) = self.grammar.as_ref() + && let Some(highlights_query) = &grammar.highlights_query + { + *grammar.highlight_map.lock() = + HighlightMap::new(highlights_query.capture_names(), theme); } } @@ -1852,7 +1828,7 @@ impl Language { impl LanguageScope { pub fn path_suffixes(&self) -> &[String] { - &self.language.path_suffixes() + self.language.path_suffixes() } pub fn language_name(&self) -> LanguageName { @@ -1942,11 +1918,11 @@ impl LanguageScope { .enumerate() .map(move |(ix, bracket)| { let mut is_enabled = true; - if let Some(next_disabled_ix) = disabled_ids.first() { - if ix == *next_disabled_ix as usize { - disabled_ids = &disabled_ids[1..]; - is_enabled = false; - } + if let Some(next_disabled_ix) = disabled_ids.first() + && ix == *next_disabled_ix as usize + { + disabled_ids = &disabled_ids[1..]; + is_enabled = false; } (bracket, is_enabled) }) @@ -2209,7 +2185,7 @@ impl LspAdapter for FakeLspAdapter { async fn check_if_user_installed( &self, _: &dyn LspAdapterDelegate, - _: Arc, + _: Option, _: &AsyncApp, ) -> Option { Some(self.language_server_binary.clone()) @@ -2218,7 +2194,7 @@ impl LspAdapter for FakeLspAdapter { fn get_language_server_command<'a>( self: Arc, _: Arc, - _: Arc, + _: Option, _: LanguageServerBinaryOptions, _: futures::lock::MutexGuard<'a, Option>, _: &'a mut AsyncApp, diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index ea988e8098ec2a795e8c0a386b4e162ecd5c89ca..4f07240e44599361bf92188fd410dd674745874a 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -1,6 +1,6 @@ use crate::{ CachedLspAdapter, File, Language, LanguageConfig, LanguageId, LanguageMatcher, - LanguageServerName, LspAdapter, PLAIN_TEXT, ToolchainLister, + LanguageServerName, LspAdapter, ManifestName, PLAIN_TEXT, ToolchainLister, language_settings::{ AllLanguageSettingsContent, LanguageSettingsContent, all_language_settings, }, @@ -49,7 +49,7 @@ impl LanguageName { pub fn from_proto(s: String) -> Self { Self(SharedString::from(s)) } - pub fn to_proto(self) -> String { + pub fn to_proto(&self) -> String { self.0.to_string() } pub fn lsp_id(&self) -> String { @@ -172,6 +172,7 @@ pub struct AvailableLanguage { hidden: bool, load: Arc Result + 'static + Send + Sync>, loaded: bool, + manifest_name: Option, } impl AvailableLanguage { @@ -259,6 +260,7 @@ pub struct LoadedLanguage { pub queries: LanguageQueries, pub context_provider: Option>, pub toolchain_provider: Option>, + pub manifest_name: Option, 
} impl LanguageRegistry { @@ -349,12 +351,14 @@ impl LanguageRegistry { config.grammar.clone(), config.matcher.clone(), config.hidden, + None, Arc::new(move || { Ok(LoadedLanguage { config: config.clone(), queries: Default::default(), toolchain_provider: None, context_provider: None, + manifest_name: None, }) }), ) @@ -428,7 +432,7 @@ impl LanguageRegistry { let mut state = self.state.write(); state .lsp_adapters - .entry(language_name.clone()) + .entry(language_name) .or_default() .push(adapter.clone()); state.all_lsp_adapters.insert(adapter.name(), adapter); @@ -450,7 +454,7 @@ impl LanguageRegistry { let cached_adapter = CachedLspAdapter::new(Arc::new(adapter)); state .lsp_adapters - .entry(language_name.clone()) + .entry(language_name) .or_default() .push(cached_adapter.clone()); state @@ -487,6 +491,7 @@ impl LanguageRegistry { grammar_name: Option>, matcher: LanguageMatcher, hidden: bool, + manifest_name: Option, load: Arc Result + 'static + Send + Sync>, ) { let state = &mut *self.state.write(); @@ -496,6 +501,7 @@ impl LanguageRegistry { existing_language.grammar = grammar_name; existing_language.matcher = matcher; existing_language.load = load; + existing_language.manifest_name = manifest_name; return; } } @@ -508,6 +514,7 @@ impl LanguageRegistry { load, hidden, loaded: false, + manifest_name, }); state.version += 1; state.reload_count += 1; @@ -575,6 +582,7 @@ impl LanguageRegistry { grammar: language.config.grammar.clone(), matcher: language.config.matcher.clone(), hidden: language.config.hidden, + manifest_name: None, load: Arc::new(|| Err(anyhow!("already loaded"))), loaded: true, }); @@ -765,7 +773,7 @@ impl LanguageRegistry { }; let content_matches = || { - config.first_line_pattern.as_ref().map_or(false, |pattern| { + config.first_line_pattern.as_ref().is_some_and(|pattern| { content .as_ref() .is_some_and(|content| pattern.is_match(content)) @@ -914,10 +922,12 @@ impl LanguageRegistry { Language::new_with_id(id, loaded_language.config, grammar) .with_context_provider(loaded_language.context_provider) .with_toolchain_lister(loaded_language.toolchain_provider) + .with_manifest(loaded_language.manifest_name) .with_queries(loaded_language.queries) } else { Ok(Language::new_with_id(id, loaded_language.config, None) .with_context_provider(loaded_language.context_provider) + .with_manifest(loaded_language.manifest_name) .with_toolchain_lister(loaded_language.toolchain_provider)) } } @@ -1092,7 +1102,7 @@ impl LanguageRegistry { use gpui::AppContext as _; let mut state = self.state.write(); - let fake_entry = state.fake_server_entries.get_mut(&name)?; + let fake_entry = state.fake_server_entries.get_mut(name)?; let (server, mut fake_server) = lsp::FakeLanguageServer::new( server_id, binary, @@ -1157,8 +1167,7 @@ impl LanguageRegistryState { soft_wrap: language.config.soft_wrap, auto_indent_on_paste: language.config.auto_indent_on_paste, ..Default::default() - } - .clone(), + }, ); self.languages.push(language); self.version += 1; diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 9b0abb15379916453eeeeb35860e859a7f721458..386ad19747bfb5066dbf27f07214dcdff4829809 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -5,7 +5,7 @@ use anyhow::Result; use collections::{FxHashMap, HashMap, HashSet}; use ec4rs::{ Properties as EditorconfigProperties, - property::{FinalNewline, IndentSize, IndentStyle, TabWidth, TrimTrailingWs}, + property::{FinalNewline, IndentSize, IndentStyle, 
MaxLineLen, TabWidth, TrimTrailingWs}, }; use globset::{Glob, GlobMatcher, GlobSet, GlobSetBuilder}; use gpui::{App, Modifiers}; @@ -133,6 +133,8 @@ pub struct LanguageSettings { /// Whether to use additional LSP queries to format (and amend) the code after /// every "trigger" symbol input, defined by LSP server capabilities. pub use_on_type_format: bool, + /// Whether indentation should be adjusted based on the context whilst typing. + pub auto_indent: bool, /// Whether indentation of pasted content should be adjusted based on the context. pub auto_indent_on_paste: bool, /// Controls how the editor handles the autoclosed characters. @@ -185,8 +187,8 @@ impl LanguageSettings { let rest = available_language_servers .iter() .filter(|&available_language_server| { - !disabled_language_servers.contains(&available_language_server) - && !enabled_language_servers.contains(&available_language_server) + !disabled_language_servers.contains(available_language_server) + && !enabled_language_servers.contains(available_language_server) }) .cloned() .collect::>(); @@ -197,7 +199,7 @@ impl LanguageSettings { if language_server.0.as_ref() == Self::REST_OF_LANGUAGE_SERVERS { rest.clone() } else { - vec![language_server.clone()] + vec![language_server] } }) .collect::>() @@ -251,7 +253,7 @@ impl EditPredictionSettings { !self.disabled_globs.iter().any(|glob| { if glob.is_absolute { file.as_local() - .map_or(false, |local| glob.matcher.is_match(local.abs_path(cx))) + .is_some_and(|local| glob.matcher.is_match(local.abs_path(cx))) } else { glob.matcher.is_match(file.path()) } @@ -561,6 +563,10 @@ pub struct LanguageSettingsContent { /// /// Default: true pub linked_edits: Option, + /// Whether indentation should be adjusted based on the context whilst typing. + /// + /// Default: true + pub auto_indent: Option, /// Whether indentation of pasted content should be adjusted based on the context. /// /// Default: true @@ -987,7 +993,7 @@ pub struct InlayHintSettings { /// Default: false #[serde(default)] pub enabled: bool, - /// Global switch to toggle inline values on and off. + /// Global switch to toggle inline values on and off when debugging. 
/// /// Default: true #[serde(default = "default_true")] @@ -1125,6 +1131,10 @@ impl AllLanguageSettings { } fn merge_with_editorconfig(settings: &mut LanguageSettings, cfg: &EditorconfigProperties) { + let preferred_line_length = cfg.get::().ok().and_then(|v| match v { + MaxLineLen::Value(u) => Some(u as u32), + MaxLineLen::Off => None, + }); let tab_size = cfg.get::().ok().and_then(|v| match v { IndentSize::Value(u) => NonZeroU32::new(u as u32), IndentSize::UseTabWidth => cfg.get::().ok().and_then(|w| match w { @@ -1152,6 +1162,7 @@ fn merge_with_editorconfig(settings: &mut LanguageSettings, cfg: &EditorconfigPr *target = value; } } + merge(&mut settings.preferred_line_length, preferred_line_length); merge(&mut settings.tab_size, tab_size); merge(&mut settings.hard_tabs, hard_tabs); merge( @@ -1517,6 +1528,7 @@ fn merge_settings(settings: &mut LanguageSettings, src: &LanguageSettingsContent merge(&mut settings.use_autoclose, src.use_autoclose); merge(&mut settings.use_auto_surround, src.use_auto_surround); merge(&mut settings.use_on_type_format, src.use_on_type_format); + merge(&mut settings.auto_indent, src.auto_indent); merge(&mut settings.auto_indent_on_paste, src.auto_indent_on_paste); merge( &mut settings.always_treat_brackets_as_autoclosed, @@ -1786,7 +1798,7 @@ mod tests { assert!(!settings.enabled_for_file(&dot_env_file, &cx)); // Test tilde expansion - let home = shellexpand::tilde("~").into_owned().to_string(); + let home = shellexpand::tilde("~").into_owned(); let home_file = make_test_file(&[&home, "test.rs"]); let settings = build_settings(&["~/test.rs"]); assert!(!settings.enabled_for_file(&home_file, &cx)); diff --git a/crates/language/src/manifest.rs b/crates/language/src/manifest.rs index 37505fec3b233c2ecd7e2ac7807a7ade6a9b3d4a..3ca0ddf71da20f69d5d6440189d4a656bfbe7c9d 100644 --- a/crates/language/src/manifest.rs +++ b/crates/language/src/manifest.rs @@ -12,6 +12,12 @@ impl Borrow for ManifestName { } } +impl Borrow for ManifestName { + fn borrow(&self) -> &str { + &self.0 + } +} + impl From for ManifestName { fn from(value: SharedString) -> Self { Self(value) diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index 18f6bb8709c707af9dd19223cac30d6728eda160..3be189cea08f247f97d05e6b9714f07d17289a8a 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -86,7 +86,7 @@ pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation { proto::operation::UpdateCompletionTriggers { replica_id: lamport_timestamp.replica_id as u32, lamport_timestamp: lamport_timestamp.value, - triggers: triggers.iter().cloned().collect(), + triggers: triggers.clone(), language_server_id: server_id.to_proto(), }, ), @@ -385,12 +385,10 @@ pub fn deserialize_undo_map_entry( /// Deserializes selections from the RPC representation. pub fn deserialize_selections(selections: Vec) -> Arc<[Selection]> { - Arc::from( - selections - .into_iter() - .filter_map(deserialize_selection) - .collect::>(), - ) + selections + .into_iter() + .filter_map(deserialize_selection) + .collect() } /// Deserializes a [`Selection`] from the RPC representation. 
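The `deserialize_selections` change above drops the intermediate `Vec` and the explicit `Arc::from`, relying on the standard library's `FromIterator` implementation for `Arc<[T]>` to collect straight into the shared slice. A minimal sketch of that idiom (not from the patch; `parse_ids` and its `&str` input are invented for illustration):

use std::sync::Arc;

// Illustrative sketch only; `parse_ids` is an invented helper, not part of this patch.
// Collecting directly into `Arc<[u32]>` goes through `impl FromIterator<T> for Arc<[T]>`,
// mirroring how `deserialize_selections` now builds its `Arc<[Selection]>`.
fn parse_ids(input: &str) -> Arc<[u32]> {
    input
        .split(',')
        .filter_map(|field| field.trim().parse().ok())
        .collect()
}

fn main() {
    let ids = parse_ids("1, 2, x, 4");
    assert_eq!(&*ids, &[1, 2, 4]);
}
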
diff --git a/crates/language/src/syntax_map.rs b/crates/language/src/syntax_map.rs index c56ffed0663a9419419201f902f3db8311acb9bd..38aad007fe16c655a3802bd70c9b709cbe83ea68 100644 --- a/crates/language/src/syntax_map.rs +++ b/crates/language/src/syntax_map.rs @@ -414,42 +414,42 @@ impl SyntaxSnapshot { .collect::>(); self.reparse_with_ranges(text, root_language.clone(), edit_ranges, registry.as_ref()); - if let Some(registry) = registry { - if registry.version() != self.language_registry_version { - let mut resolved_injection_ranges = Vec::new(); - let mut cursor = self - .layers - .filter::<_, ()>(text, |summary| summary.contains_unknown_injections); - cursor.next(); - while let Some(layer) = cursor.item() { - let SyntaxLayerContent::Pending { language_name } = &layer.content else { - unreachable!() - }; - if registry - .language_for_name_or_extension(language_name) - .now_or_never() - .and_then(|language| language.ok()) - .is_some() - { - let range = layer.range.to_offset(text); - log::trace!("reparse range {range:?} for language {language_name:?}"); - resolved_injection_ranges.push(range); - } - - cursor.next(); - } - drop(cursor); - - if !resolved_injection_ranges.is_empty() { - self.reparse_with_ranges( - text, - root_language, - resolved_injection_ranges, - Some(®istry), - ); + if let Some(registry) = registry + && registry.version() != self.language_registry_version + { + let mut resolved_injection_ranges = Vec::new(); + let mut cursor = self + .layers + .filter::<_, ()>(text, |summary| summary.contains_unknown_injections); + cursor.next(); + while let Some(layer) = cursor.item() { + let SyntaxLayerContent::Pending { language_name } = &layer.content else { + unreachable!() + }; + if registry + .language_for_name_or_extension(language_name) + .now_or_never() + .and_then(|language| language.ok()) + .is_some() + { + let range = layer.range.to_offset(text); + log::trace!("reparse range {range:?} for language {language_name:?}"); + resolved_injection_ranges.push(range); } - self.language_registry_version = registry.version(); + + cursor.next(); } + drop(cursor); + + if !resolved_injection_ranges.is_empty() { + self.reparse_with_ranges( + text, + root_language, + resolved_injection_ranges, + Some(®istry), + ); + } + self.language_registry_version = registry.version(); } self.update_count += 1; @@ -832,7 +832,7 @@ impl SyntaxSnapshot { query: fn(&Grammar) -> Option<&Query>, ) -> SyntaxMapCaptures<'a> { SyntaxMapCaptures::new( - range.clone(), + range, text, [SyntaxLayer { language, @@ -1065,10 +1065,10 @@ impl<'a> SyntaxMapCaptures<'a> { pub fn set_byte_range(&mut self, range: Range) { for layer in &mut self.layers { layer.captures.set_byte_range(range.clone()); - if let Some(capture) = &layer.next_capture { - if capture.node.end_byte() > range.start { - continue; - } + if let Some(capture) = &layer.next_capture + && capture.node.end_byte() > range.start + { + continue; } layer.advance(); } @@ -1277,11 +1277,11 @@ fn join_ranges( (None, None) => break, }; - if let Some(last) = result.last_mut() { - if range.start <= last.end { - last.end = last.end.max(range.end); - continue; - } + if let Some(last) = result.last_mut() + && range.start <= last.end + { + last.end = last.end.max(range.end); + continue; } result.push(range); } @@ -1297,7 +1297,7 @@ fn parse_text( ) -> anyhow::Result { with_parser(|parser| { let mut chunks = text.chunks_in_range(start_byte..text.len()); - parser.set_included_ranges(&ranges)?; + parser.set_included_ranges(ranges)?; parser.set_language(&grammar.ts_language)?; 
parser .parse_with_options( @@ -1330,14 +1330,13 @@ fn get_injections( // if there currently no matches for that injection. combined_injection_ranges.clear(); for pattern in &config.patterns { - if let (Some(language_name), true) = (pattern.language.as_ref(), pattern.combined) { - if let Some(language) = language_registry + if let (Some(language_name), true) = (pattern.language.as_ref(), pattern.combined) + && let Some(language) = language_registry .language_for_name_or_extension(language_name) .now_or_never() .and_then(|language| language.ok()) - { - combined_injection_ranges.insert(language.id, (language, Vec::new())); - } + { + combined_injection_ranges.insert(language.id, (language, Vec::new())); } } @@ -1357,10 +1356,11 @@ fn get_injections( content_ranges.first().unwrap().start_byte..content_ranges.last().unwrap().end_byte; // Avoid duplicate matches if two changed ranges intersect the same injection. - if let Some((prev_pattern_ix, prev_range)) = &prev_match { - if mat.pattern_index == *prev_pattern_ix && content_range == *prev_range { - continue; - } + if let Some((prev_pattern_ix, prev_range)) = &prev_match + && mat.pattern_index == *prev_pattern_ix + && content_range == *prev_range + { + continue; } prev_match = Some((mat.pattern_index, content_range.clone())); @@ -1630,10 +1630,8 @@ impl<'a> SyntaxLayer<'a> { if offset < range.start || offset > range.end { continue; } - } else { - if offset <= range.start || offset >= range.end { - continue; - } + } else if offset <= range.start || offset >= range.end { + continue; } if let Some((_, smallest_range)) = &smallest_match { diff --git a/crates/language/src/syntax_map/syntax_map_tests.rs b/crates/language/src/syntax_map/syntax_map_tests.rs index d576c95cd58eb823a7f8bdfdc42be9ba6a743410..622731b7814ce16bfcc026b6723e80d5ba4dda7a 100644 --- a/crates/language/src/syntax_map/syntax_map_tests.rs +++ b/crates/language/src/syntax_map/syntax_map_tests.rs @@ -58,8 +58,7 @@ fn test_splice_included_ranges() { assert_eq!(change, 0..1); // does not create overlapping ranges - let (new_ranges, change) = - splice_included_ranges(ranges.clone(), &[0..18], &[ts_range(20..32)]); + let (new_ranges, change) = splice_included_ranges(ranges, &[0..18], &[ts_range(20..32)]); assert_eq!( new_ranges, &[ts_range(20..32), ts_range(50..60), ts_range(80..90)] @@ -104,7 +103,7 @@ fn test_syntax_map_layers_for_range(cx: &mut App) { ); let mut syntax_map = SyntaxMap::new(&buffer); - syntax_map.set_language_registry(registry.clone()); + syntax_map.set_language_registry(registry); syntax_map.reparse(language.clone(), &buffer); assert_layers_for_range( @@ -165,7 +164,7 @@ fn test_syntax_map_layers_for_range(cx: &mut App) { // Put the vec! macro back, adding back the syntactic layer. 
buffer.undo(); syntax_map.interpolate(&buffer); - syntax_map.reparse(language.clone(), &buffer); + syntax_map.reparse(language, &buffer); assert_layers_for_range( &syntax_map, @@ -252,8 +251,8 @@ fn test_dynamic_language_injection(cx: &mut App) { assert!(syntax_map.contains_unknown_injections()); registry.add(Arc::new(html_lang())); - syntax_map.reparse(markdown.clone(), &buffer); - syntax_map.reparse(markdown_inline.clone(), &buffer); + syntax_map.reparse(markdown, &buffer); + syntax_map.reparse(markdown_inline, &buffer); assert_layers_for_range( &syntax_map, &buffer, @@ -862,7 +861,7 @@ fn test_syntax_map_languages_loading_with_erb(cx: &mut App) { log::info!("editing"); buffer.edit_via_marked_text(&text); syntax_map.interpolate(&buffer); - syntax_map.reparse(language.clone(), &buffer); + syntax_map.reparse(language, &buffer); assert_capture_ranges( &syntax_map, @@ -986,7 +985,7 @@ fn test_random_edits( syntax_map.reparse(language.clone(), &buffer); let mut reference_syntax_map = SyntaxMap::new(&buffer); - reference_syntax_map.set_language_registry(registry.clone()); + reference_syntax_map.set_language_registry(registry); log::info!("initial text:\n{}", buffer.text()); diff --git a/crates/language/src/text_diff.rs b/crates/language/src/text_diff.rs index f9221f571afb1baa0ba0b824922e799fcec01c88..11d8a070d213852f0a98078f2ed8c76c9cced47b 100644 --- a/crates/language/src/text_diff.rs +++ b/crates/language/src/text_diff.rs @@ -88,11 +88,11 @@ pub fn text_diff_with_options( let new_offset = new_byte_range.start; hunk_input.clear(); hunk_input.update_before(tokenize( - &old_text[old_byte_range.clone()], + &old_text[old_byte_range], options.language_scope.clone(), )); hunk_input.update_after(tokenize( - &new_text[new_byte_range.clone()], + &new_text[new_byte_range], options.language_scope.clone(), )); diff_internal(&hunk_input, |old_byte_range, new_byte_range, _, _| { @@ -103,7 +103,7 @@ pub fn text_diff_with_options( let replacement_text = if new_byte_range.is_empty() { empty.clone() } else { - new_text[new_byte_range.clone()].into() + new_text[new_byte_range].into() }; edits.push((old_byte_range, replacement_text)); }); @@ -111,9 +111,9 @@ pub fn text_diff_with_options( let replacement_text = if new_byte_range.is_empty() { empty.clone() } else { - new_text[new_byte_range.clone()].into() + new_text[new_byte_range].into() }; - edits.push((old_byte_range.clone(), replacement_text)); + edits.push((old_byte_range, replacement_text)); } }, ); @@ -154,19 +154,19 @@ fn diff_internal( input, |old_tokens: Range, new_tokens: Range| { old_offset += token_len( - &input, + input, &input.before[old_token_ix as usize..old_tokens.start as usize], ); new_offset += token_len( - &input, + input, &input.after[new_token_ix as usize..new_tokens.start as usize], ); let old_len = token_len( - &input, + input, &input.before[old_tokens.start as usize..old_tokens.end as usize], ); let new_len = token_len( - &input, + input, &input.after[new_tokens.start as usize..new_tokens.end as usize], ); let old_byte_range = old_offset..old_offset + old_len; @@ -186,14 +186,14 @@ fn tokenize(text: &str, language_scope: Option) -> impl Iterator< let mut prev = None; let mut start_ix = 0; iter::from_fn(move || { - while let Some((ix, c)) = chars.next() { + for (ix, c) in chars.by_ref() { let mut token = None; let kind = classifier.kind(c); - if let Some((prev_char, prev_kind)) = prev { - if kind != prev_kind || (kind == CharKind::Punctuation && c != prev_char) { - token = Some(&text[start_ix..ix]); - start_ix = ix; - } + if let 
Some((prev_char, prev_kind)) = prev + && (kind != prev_kind || (kind == CharKind::Punctuation && c != prev_char)) + { + token = Some(&text[start_ix..ix]); + start_ix = ix; } prev = Some((c, kind)); if token.is_some() { diff --git a/crates/language/src/toolchain.rs b/crates/language/src/toolchain.rs index 1f4b038f68e5fcf1ed5c499d543fa92ba3c2de94..979513bc96f9660772517a728077614fbd7f5e7a 100644 --- a/crates/language/src/toolchain.rs +++ b/crates/language/src/toolchain.rs @@ -17,7 +17,7 @@ use settings::WorktreeId; use crate::{LanguageName, ManifestName}; /// Represents a single toolchain. -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq)] pub struct Toolchain { /// User-facing label pub name: SharedString, @@ -27,6 +27,14 @@ pub struct Toolchain { pub as_json: serde_json::Value, } +impl std::hash::Hash for Toolchain { + fn hash(&self, state: &mut H) { + self.name.hash(state); + self.path.hash(state); + self.language_name.hash(state); + } +} + impl PartialEq for Toolchain { fn eq(&self, other: &Self) -> bool { // Do not use as_json for comparisons; it shouldn't impact equality, as it's not user-surfaced. @@ -64,6 +72,29 @@ pub trait LanguageToolchainStore: Send + Sync + 'static { ) -> Option; } +pub trait LocalLanguageToolchainStore: Send + Sync + 'static { + fn active_toolchain( + self: Arc, + worktree_id: WorktreeId, + relative_path: &Arc, + language_name: LanguageName, + cx: &mut AsyncApp, + ) -> Option; +} + +#[async_trait(?Send )] +impl LanguageToolchainStore for T { + async fn active_toolchain( + self: Arc, + worktree_id: WorktreeId, + relative_path: Arc, + language_name: LanguageName, + cx: &mut AsyncApp, + ) -> Option { + self.active_toolchain(worktree_id, &relative_path, language_name, cx) + } +} + type DefaultIndex = usize; #[derive(Default, Clone)] pub struct ToolchainList { diff --git a/crates/language_extension/src/extension_lsp_adapter.rs b/crates/language_extension/src/extension_lsp_adapter.rs index 98b6fd4b5a2ef6e7f1b5adbc54dcecd0707b60ff..e465a8dd0a0404e76b19942dd15bf19c4f204fdc 100644 --- a/crates/language_extension/src/extension_lsp_adapter.rs +++ b/crates/language_extension/src/extension_lsp_adapter.rs @@ -12,8 +12,8 @@ use fs::Fs; use futures::{Future, FutureExt, future::join_all}; use gpui::{App, AppContext, AsyncApp, Task}; use language::{ - BinaryStatus, CodeLabel, HighlightId, Language, LanguageName, LanguageToolchainStore, - LspAdapter, LspAdapterDelegate, + BinaryStatus, CodeLabel, HighlightId, Language, LanguageName, LspAdapter, LspAdapterDelegate, + Toolchain, }; use lsp::{ CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions, LanguageServerName, @@ -159,7 +159,7 @@ impl LspAdapter for ExtensionLspAdapter { fn get_language_server_command<'a>( self: Arc, delegate: Arc, - _: Arc, + _: Option, _: LanguageServerBinaryOptions, _: futures::lock::MutexGuard<'a, Option>, _: &'a mut AsyncApp, @@ -288,7 +288,7 @@ impl LspAdapter for ExtensionLspAdapter { self: Arc, _: &dyn Fs, delegate: &Arc, - _: Arc, + _: Option, _cx: &mut AsyncApp, ) -> Result { let delegate = Arc::new(WorktreeDelegateAdapter(delegate.clone())) as _; @@ -336,7 +336,7 @@ impl LspAdapter for ExtensionLspAdapter { target_language_server_id: LanguageServerName, _: &dyn Fs, delegate: &Arc, - _: Arc, + _cx: &mut AsyncApp, ) -> Result> { let delegate = Arc::new(WorktreeDelegateAdapter(delegate.clone())) as _; diff --git a/crates/language_extension/src/language_extension.rs b/crates/language_extension/src/language_extension.rs index 
1915eae2d18fe5fb96dbb0dcca614f8a4f41bb81..510f870ce8afbda090817e0ce515d4c5c2e3c63b 100644 --- a/crates/language_extension/src/language_extension.rs +++ b/crates/language_extension/src/language_extension.rs @@ -52,7 +52,7 @@ impl ExtensionLanguageProxy for LanguageServerRegistryProxy { load: Arc Result + Send + Sync + 'static>, ) { self.language_registry - .register_language(language, grammar, matcher, hidden, load); + .register_language(language, grammar, matcher, hidden, None, load); } fn remove_languages( @@ -61,6 +61,6 @@ impl ExtensionLanguageProxy for LanguageServerRegistryProxy { grammars_to_remove: &[Arc], ) { self.language_registry - .remove_languages(&languages_to_remove, &grammars_to_remove); + .remove_languages(languages_to_remove, grammars_to_remove); } } diff --git a/crates/language_model/src/fake_provider.rs b/crates/language_model/src/fake_provider.rs index a9c7d5c0343295ff02d9d693f2cdbe3d92f1e07d..ebfd37d16cf622a622047b7f5babedebd541ad57 100644 --- a/crates/language_model/src/fake_provider.rs +++ b/crates/language_model/src/fake_provider.rs @@ -1,13 +1,14 @@ use crate::{ - AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, - LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId, - LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, - LanguageModelToolChoice, + AuthenticateError, ConfigurationViewTargetAgent, LanguageModel, LanguageModelCompletionError, + LanguageModelCompletionEvent, LanguageModelId, LanguageModelName, LanguageModelProvider, + LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, + LanguageModelRequest, LanguageModelToolChoice, }; -use futures::{FutureExt, StreamExt, channel::mpsc, future::BoxFuture, stream::BoxStream}; +use futures::{FutureExt, channel::mpsc, future::BoxFuture, stream::BoxStream}; use gpui::{AnyView, App, AsyncApp, Entity, Task, Window}; use http_client::Result; use parking_lot::Mutex; +use smol::stream::StreamExt; use std::sync::Arc; #[derive(Clone)] @@ -62,7 +63,12 @@ impl LanguageModelProvider for FakeLanguageModelProvider { Task::ready(Ok(())) } - fn configuration_view(&self, _window: &mut Window, _: &mut App) -> AnyView { + fn configuration_view( + &self, + _target_agent: ConfigurationViewTargetAgent, + _window: &mut Window, + _: &mut App, + ) -> AnyView { unimplemented!() } @@ -95,7 +101,9 @@ pub struct FakeLanguageModel { current_completion_txs: Mutex< Vec<( LanguageModelRequest, - mpsc::UnboundedSender, + mpsc::UnboundedSender< + Result, + >, )>, >, } @@ -145,7 +153,21 @@ impl FakeLanguageModel { .find(|(req, _)| req == request) .map(|(_, tx)| tx) .unwrap(); - tx.unbounded_send(event.into()).unwrap(); + tx.unbounded_send(Ok(event.into())).unwrap(); + } + + pub fn send_completion_stream_error( + &self, + request: &LanguageModelRequest, + error: impl Into, + ) { + let current_completion_txs = self.current_completion_txs.lock(); + let tx = current_completion_txs + .iter() + .find(|(req, _)| req == request) + .map(|(_, tx)| tx) + .unwrap(); + tx.unbounded_send(Err(error.into())).unwrap(); } pub fn end_completion_stream(&self, request: &LanguageModelRequest) { @@ -165,6 +187,13 @@ impl FakeLanguageModel { self.send_completion_stream_event(self.pending_completions().last().unwrap(), event); } + pub fn send_last_completion_stream_error( + &self, + error: impl Into, + ) { + self.send_completion_stream_error(self.pending_completions().last().unwrap(), error); + } + pub fn end_last_completion_stream(&self) { 
self.end_completion_stream(self.pending_completions().last().unwrap()); } @@ -224,7 +253,7 @@ impl LanguageModel for FakeLanguageModel { > { let (tx, rx) = mpsc::unbounded(); self.current_completion_txs.lock().push((request, tx)); - async move { Ok(rx.map(Ok).boxed()) }.boxed() + async move { Ok(rx.boxed()) }.boxed() } fn as_fake(&self) -> &Self { diff --git a/crates/language_model/src/language_model.rs b/crates/language_model/src/language_model.rs index 1637d2de8a3c14b910ea345c03a4eb5db13df28d..158bebcbbf8843c25f5030f5b4b6e4ae436f371a 100644 --- a/crates/language_model/src/language_model.rs +++ b/crates/language_model/src/language_model.rs @@ -54,7 +54,7 @@ pub const ZED_CLOUD_PROVIDER_NAME: LanguageModelProviderName = pub fn init(client: Arc, cx: &mut App) { init_settings(cx); - RefreshLlmTokenListener::register(client.clone(), cx); + RefreshLlmTokenListener::register(client, cx); } pub fn init_settings(cx: &mut App) { @@ -300,7 +300,7 @@ impl From for LanguageModelCompletionError { }, AnthropicError::ServerOverloaded { retry_after } => Self::ServerOverloaded { provider, - retry_after: retry_after, + retry_after, }, AnthropicError::ApiError(api_error) => api_error.into(), } @@ -538,7 +538,7 @@ pub trait LanguageModel: Send + Sync { if let Some(first_event) = events.next().await { match first_event { Ok(LanguageModelCompletionEvent::StartMessage { message_id: id }) => { - message_id = Some(id.clone()); + message_id = Some(id); } Ok(LanguageModelCompletionEvent::Text(text)) => { first_item_text = Some(text); @@ -634,7 +634,12 @@ pub trait LanguageModelProvider: 'static { } fn is_authenticated(&self, cx: &App) -> bool; fn authenticate(&self, cx: &mut App) -> Task>; - fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView; + fn configuration_view( + &self, + target_agent: ConfigurationViewTargetAgent, + window: &mut Window, + cx: &mut App, + ) -> AnyView; fn must_accept_terms(&self, _cx: &App) -> bool { false } @@ -648,6 +653,13 @@ pub trait LanguageModelProvider: 'static { fn reset_credentials(&self, cx: &mut App) -> Task>; } +#[derive(Default, Clone, Copy)] +pub enum ConfigurationViewTargetAgent { + #[default] + ZedAgent, + Other(&'static str), +} + #[derive(PartialEq, Eq)] pub enum LanguageModelProviderTosView { /// When there are some past interactions in the Agent Panel. diff --git a/crates/language_model/src/model/cloud_model.rs b/crates/language_model/src/model/cloud_model.rs index 3b4c1fa269020d1bf17d98cbb67251902536dafc..8a7f3456fbb54826809e8a25c2c767d387afcd4e 100644 --- a/crates/language_model/src/model/cloud_model.rs +++ b/crates/language_model/src/model/cloud_model.rs @@ -42,6 +42,18 @@ impl fmt::Display for ModelRequestLimitReachedError { } } +#[derive(Error, Debug)] +pub struct ToolUseLimitReachedError; + +impl fmt::Display for ToolUseLimitReachedError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!( + f, + "Consecutive tool use limit reached. Enable Burn Mode for unlimited tool use." 
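
ConfigurationViewTargetAgent above lets a provider's configuration view tailor its copy depending on which agent is asking for it, defaulting to the built-in agent. A small self-contained sketch of that shape; TargetAgent, BuiltInAgent, and configuration_heading are illustrative names rather than the actual API:

    // A default-able target enum used to tailor one line of UI copy.
    #[derive(Clone, Copy, Default)]
    enum TargetAgent {
        #[default]
        BuiltInAgent,
        Other(&'static str),
    }

    fn configuration_heading(target: TargetAgent, provider: &str) -> String {
        let subject = match target {
            TargetAgent::BuiltInAgent => format!("the built-in agent with {provider}"),
            TargetAgent::Other(agent) => agent.to_string(),
        };
        format!("To use {subject}, you need to add an API key. Follow these steps:")
    }

    fn main() {
        println!("{}", configuration_heading(TargetAgent::default(), "Anthropic"));
        println!("{}", configuration_heading(TargetAgent::Other("My ACP agent"), "Anthropic"));
    }

Keeping the default variant on the enum means existing call sites can pass Default::default() and only external agents need to spell out a name.
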
+ ) + } +} + #[derive(Clone, Default)] pub struct LlmApiToken(Arc>>); @@ -70,7 +82,7 @@ impl LlmApiToken { let response = client.cloud_client().create_llm_token(system_id).await?; *lock = Some(response.token.0.clone()); - Ok(response.token.0.clone()) + Ok(response.token.0) } } diff --git a/crates/language_model/src/registry.rs b/crates/language_model/src/registry.rs index 7cf071808a2c0d95bf9aa5a41eaa260cff533d57..8f52f8c1c369b4614851a05c40c3c9146dcd01d8 100644 --- a/crates/language_model/src/registry.rs +++ b/crates/language_model/src/registry.rs @@ -21,7 +21,7 @@ impl Global for GlobalLanguageModelRegistry {} pub enum ConfigurationError { #[error("Configure at least one LLM provider to start using the panel.")] NoProvider, - #[error("LLM Provider is not configured or does not support the configured model.")] + #[error("LLM provider is not configured or does not support the configured model.")] ModelNotFound, #[error("{} LLM provider is not configured.", .0.name().0)] ProviderNotAuthenticated(Arc), @@ -107,7 +107,7 @@ pub enum Event { InlineAssistantModelChanged, CommitMessageModelChanged, ThreadSummaryModelChanged, - ProviderStateChanged, + ProviderStateChanged(LanguageModelProviderId), AddedProvider(LanguageModelProviderId), RemovedProvider(LanguageModelProviderId), } @@ -148,8 +148,11 @@ impl LanguageModelRegistry { ) { let id = provider.id(); - let subscription = provider.subscribe(cx, |_, cx| { - cx.emit(Event::ProviderStateChanged); + let subscription = provider.subscribe(cx, { + let id = id.clone(); + move |_, cx| { + cx.emit(Event::ProviderStateChanged(id.clone())); + } }); if let Some(subscription) = subscription { subscription.detach(); diff --git a/crates/language_model/src/request.rs b/crates/language_model/src/request.rs index dc485e9937f7e6579f99212609e6f2383c11ae6c..1182e0f7a8f1952a62832970ca63f3684eea5b17 100644 --- a/crates/language_model/src/request.rs +++ b/crates/language_model/src/request.rs @@ -220,42 +220,39 @@ impl<'de> Deserialize<'de> for LanguageModelToolResultContent { // Accept wrapped text format: { "type": "text", "text": "..." } if let (Some(type_value), Some(text_value)) = - (get_field(&obj, "type"), get_field(&obj, "text")) + (get_field(obj, "type"), get_field(obj, "text")) + && let Some(type_str) = type_value.as_str() + && type_str.to_lowercase() == "text" + && let Some(text) = text_value.as_str() { - if let Some(type_str) = type_value.as_str() { - if type_str.to_lowercase() == "text" { - if let Some(text) = text_value.as_str() { - return Ok(Self::Text(Arc::from(text))); - } - } - } + return Ok(Self::Text(Arc::from(text))); } // Check for wrapped Text variant: { "text": "..." } - if let Some((_key, value)) = obj.iter().find(|(k, _)| k.to_lowercase() == "text") { - if obj.len() == 1 { - // Only one field, and it's "text" (case-insensitive) - if let Some(text) = value.as_str() { - return Ok(Self::Text(Arc::from(text))); - } + if let Some((_key, value)) = obj.iter().find(|(k, _)| k.to_lowercase() == "text") + && obj.len() == 1 + { + // Only one field, and it's "text" (case-insensitive) + if let Some(text) = value.as_str() { + return Ok(Self::Text(Arc::from(text))); } } // Check for wrapped Image variant: { "image": { "source": "...", "size": ... 
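
ProviderStateChanged now carries the LanguageModelProviderId, so subscribers can ignore state changes from providers they do not care about instead of reacting to every one. A minimal sketch of that filtering, with ProviderId and RegistryEvent as simplified stand-ins:

    #[derive(Clone, Debug, PartialEq, Eq)]
    struct ProviderId(&'static str);

    #[derive(Debug)]
    enum RegistryEvent {
        ProviderStateChanged(ProviderId),
        AddedProvider(ProviderId),
        RemovedProvider(ProviderId),
    }

    fn handle(event: &RegistryEvent, interesting: &ProviderId) {
        match event {
            RegistryEvent::ProviderStateChanged(id) if id == interesting => {
                println!("refresh views for {id:?}");
            }
            RegistryEvent::ProviderStateChanged(_) => { /* unrelated provider: ignore */ }
            other => println!("membership change: {other:?}"),
        }
    }

    fn main() {
        let anthropic = ProviderId("anthropic");
        handle(&RegistryEvent::ProviderStateChanged(ProviderId("anthropic")), &anthropic);
        handle(&RegistryEvent::ProviderStateChanged(ProviderId("ollama")), &anthropic);
        handle(&RegistryEvent::AddedProvider(ProviderId("openai")), &anthropic);
        handle(&RegistryEvent::RemovedProvider(ProviderId("ollama")), &anthropic);
    }

Cloning the id into the subscription closure, as the diff does, is what lets each emitted event name its originating provider.
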
} } - if let Some((_key, value)) = obj.iter().find(|(k, _)| k.to_lowercase() == "image") { - if obj.len() == 1 { - // Only one field, and it's "image" (case-insensitive) - // Try to parse the nested image object - if let Some(image_obj) = value.as_object() { - if let Some(image) = LanguageModelImage::from_json(image_obj) { - return Ok(Self::Image(image)); - } - } + if let Some((_key, value)) = obj.iter().find(|(k, _)| k.to_lowercase() == "image") + && obj.len() == 1 + { + // Only one field, and it's "image" (case-insensitive) + // Try to parse the nested image object + if let Some(image_obj) = value.as_object() + && let Some(image) = LanguageModelImage::from_json(image_obj) + { + return Ok(Self::Image(image)); } } // Try as direct Image (object with "source" and "size" fields) - if let Some(image) = LanguageModelImage::from_json(&obj) { + if let Some(image) = LanguageModelImage::from_json(obj) { return Ok(Self::Image(image)); } } @@ -272,7 +269,7 @@ impl<'de> Deserialize<'de> for LanguageModelToolResultContent { impl LanguageModelToolResultContent { pub fn to_str(&self) -> Option<&str> { match self { - Self::Text(text) => Some(&text), + Self::Text(text) => Some(text), Self::Image(_) => None, } } @@ -297,6 +294,12 @@ impl From for LanguageModelToolResultContent { } } +impl From for LanguageModelToolResultContent { + fn from(image: LanguageModelImage) -> Self { + Self::Image(image) + } +} + #[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq, Hash)] pub enum MessageContent { Text(String), diff --git a/crates/language_model/src/role.rs b/crates/language_model/src/role.rs index 953dfa6fdff91c61a3a444076fd768f260b882c5..4b47ef36dd564e5950ce7d42a7e4f9263f3998b7 100644 --- a/crates/language_model/src/role.rs +++ b/crates/language_model/src/role.rs @@ -19,7 +19,7 @@ impl Role { } } - pub fn to_proto(&self) -> proto::LanguageModelRole { + pub fn to_proto(self) -> proto::LanguageModelRole { match self { Role::User => proto::LanguageModelRole::LanguageModelUser, Role::Assistant => proto::LanguageModelRole::LanguageModelAssistant, diff --git a/crates/language_models/src/language_models.rs b/crates/language_models/src/language_models.rs index 18e6f47ed0591256591df578f98dcaf988ed6444..738b72b0c9a6dbb7c9606cc72707b27e66abf09c 100644 --- a/crates/language_models/src/language_models.rs +++ b/crates/language_models/src/language_models.rs @@ -104,7 +104,7 @@ fn register_language_model_providers( cx: &mut Context, ) { registry.register_provider( - CloudLanguageModelProvider::new(user_store.clone(), client.clone(), cx), + CloudLanguageModelProvider::new(user_store, client.clone(), cx), cx, ); diff --git a/crates/language_models/src/provider/anthropic.rs b/crates/language_models/src/provider/anthropic.rs index ef21e85f711e41722d4ac421ba1d0a89b422b6a6..0d061c058765cd507c3785b82da3d055d0bc12be 100644 --- a/crates/language_models/src/provider/anthropic.rs +++ b/crates/language_models/src/provider/anthropic.rs @@ -15,11 +15,11 @@ use gpui::{ }; use http_client::HttpClient; use language_model::{ - AuthenticateError, LanguageModel, LanguageModelCacheConfiguration, - LanguageModelCompletionError, LanguageModelId, LanguageModelName, LanguageModelProvider, - LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, - LanguageModelRequest, LanguageModelToolChoice, LanguageModelToolResultContent, MessageContent, - RateLimiter, Role, + AuthenticateError, ConfigurationViewTargetAgent, LanguageModel, + LanguageModelCacheConfiguration, LanguageModelCompletionError, LanguageModelId, + 
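
The deserializer above accepts several shapes for tool-result text: a bare string, a wrapped { "type": "text", "text": ... } object, and a single-field { "text": ... } object. A compact sketch of the same idea written against serde_json::Value (serde_json assumed as a dependency); the function name is illustrative:

    use serde_json::Value;

    // Tolerant extraction of tool-result text from the shapes handled above.
    fn tool_result_text(value: &Value) -> Option<String> {
        match value {
            Value::String(s) => Some(s.clone()),
            Value::Object(obj) => {
                // Wrapped form with an explicit "type" tag.
                if let (Some(ty), Some(text)) = (obj.get("type"), obj.get("text")) {
                    if ty.as_str().is_some_and(|t| t.eq_ignore_ascii_case("text")) {
                        return text.as_str().map(|s| s.to_owned());
                    }
                }
                // Single-field { "text": ... } form.
                if obj.len() == 1 {
                    if let Some(text) = obj.get("text").and_then(Value::as_str) {
                        return Some(text.to_owned());
                    }
                }
                None
            }
            _ => None,
        }
    }

    fn main() {
        let a = serde_json::json!("plain");
        let b = serde_json::json!({ "type": "Text", "text": "wrapped" });
        let c = serde_json::json!({ "text": "single field" });
        let d = serde_json::json!({ "image": { "source": "...", "size": 42 } });

        assert_eq!(tool_result_text(&a).as_deref(), Some("plain"));
        assert_eq!(tool_result_text(&b).as_deref(), Some("wrapped"));
        assert_eq!(tool_result_text(&c).as_deref(), Some("single field"));
        assert_eq!(tool_result_text(&d), None); // image payloads take a different path
    }
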
LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, + LanguageModelProviderState, LanguageModelRequest, LanguageModelToolChoice, + LanguageModelToolResultContent, MessageContent, RateLimiter, Role, }; use language_model::{LanguageModelCompletionEvent, LanguageModelToolUse, StopReason}; use schemars::JsonSchema; @@ -114,7 +114,7 @@ impl State { .clone(); cx.spawn(async move |this, cx| { credentials_provider - .delete_credentials(&api_url, &cx) + .delete_credentials(&api_url, cx) .await .ok(); this.update(cx, |this, cx| { @@ -133,7 +133,7 @@ impl State { .clone(); cx.spawn(async move |this, cx| { credentials_provider - .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx) + .write_credentials(&api_url, "Bearer", api_key.as_bytes(), cx) .await .ok(); @@ -153,29 +153,14 @@ impl State { return Task::ready(Ok(())); } - let credentials_provider = ::global(cx); - let api_url = AllLanguageModelSettings::get_global(cx) - .anthropic - .api_url - .clone(); + let key = AnthropicLanguageModelProvider::api_key(cx); cx.spawn(async move |this, cx| { - let (api_key, from_env) = if let Ok(api_key) = std::env::var(ANTHROPIC_API_KEY_VAR) { - (api_key, true) - } else { - let (_, api_key) = credentials_provider - .read_credentials(&api_url, &cx) - .await? - .ok_or(AuthenticateError::CredentialsNotFound)?; - ( - String::from_utf8(api_key).context("invalid {PROVIDER_NAME} API key")?, - false, - ) - }; + let key = key.await?; this.update(cx, |this, cx| { - this.api_key = Some(api_key); - this.api_key_from_env = from_env; + this.api_key = Some(key.key); + this.api_key_from_env = key.from_env; cx.notify(); })?; @@ -184,6 +169,11 @@ impl State { } } +pub struct ApiKey { + pub key: String, + pub from_env: bool, +} + impl AnthropicLanguageModelProvider { pub fn new(http_client: Arc, cx: &mut App) -> Self { let state = cx.new(|cx| State { @@ -206,6 +196,33 @@ impl AnthropicLanguageModelProvider { request_limiter: RateLimiter::new(4), }) } + + pub fn api_key(cx: &mut App) -> Task> { + let credentials_provider = ::global(cx); + let api_url = AllLanguageModelSettings::get_global(cx) + .anthropic + .api_url + .clone(); + + if let Ok(key) = std::env::var(ANTHROPIC_API_KEY_VAR) { + Task::ready(Ok(ApiKey { + key, + from_env: true, + })) + } else { + cx.spawn(async move |cx| { + let (_, api_key) = credentials_provider + .read_credentials(&api_url, cx) + .await? 
+ .ok_or(AuthenticateError::CredentialsNotFound)?; + + Ok(ApiKey { + key: String::from_utf8(api_key).context("invalid {PROVIDER_NAME} API key")?, + from_env: false, + }) + }) + } + } } impl LanguageModelProviderState for AnthropicLanguageModelProvider { @@ -299,8 +316,13 @@ impl LanguageModelProvider for AnthropicLanguageModelProvider { self.state.update(cx, |state, cx| state.authenticate(cx)) } - fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView { - cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx)) + fn configuration_view( + &self, + target_agent: ConfigurationViewTargetAgent, + window: &mut Window, + cx: &mut App, + ) -> AnyView { + cx.new(|cx| ConfigurationView::new(self.state.clone(), target_agent, window, cx)) .into() } @@ -532,7 +554,7 @@ pub fn into_anthropic( .into_iter() .filter_map(|content| match content { MessageContent::Text(text) => { - let text = if text.chars().last().map_or(false, |c| c.is_whitespace()) { + let text = if text.chars().last().is_some_and(|c| c.is_whitespace()) { text.trim_end().to_string() } else { text @@ -611,11 +633,11 @@ pub fn into_anthropic( Role::Assistant => anthropic::Role::Assistant, Role::System => unreachable!("System role should never occur here"), }; - if let Some(last_message) = new_messages.last_mut() { - if last_message.role == anthropic_role { - last_message.content.extend(anthropic_message_content); - continue; - } + if let Some(last_message) = new_messages.last_mut() + && last_message.role == anthropic_role + { + last_message.content.extend(anthropic_message_content); + continue; } // Mark the last segment of the message as cached @@ -791,7 +813,7 @@ impl AnthropicEventMapper { ))]; } } - return vec![]; + vec![] } }, Event::ContentBlockStop { index } => { @@ -902,12 +924,18 @@ struct ConfigurationView { api_key_editor: Entity, state: gpui::Entity, load_credentials_task: Option>, + target_agent: ConfigurationViewTargetAgent, } impl ConfigurationView { const PLACEHOLDER_TEXT: &'static str = "sk-ant-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"; - fn new(state: gpui::Entity, window: &mut Window, cx: &mut Context) -> Self { + fn new( + state: gpui::Entity, + target_agent: ConfigurationViewTargetAgent, + window: &mut Window, + cx: &mut Context, + ) -> Self { cx.observe(&state, |_, _, cx| { cx.notify(); }) @@ -939,6 +967,7 @@ impl ConfigurationView { }), state, load_credentials_task, + target_agent, } } @@ -1012,7 +1041,10 @@ impl Render for ConfigurationView { v_flex() .size_full() .on_action(cx.listener(Self::save_api_key)) - .child(Label::new("To use Zed's agent with Anthropic, you need to add an API key. Follow these steps:")) + .child(Label::new(format!("To use {}, you need to add an API key. 
Follow these steps:", match self.target_agent { + ConfigurationViewTargetAgent::ZedAgent => "Zed's agent with Anthropic", + ConfigurationViewTargetAgent::Other(agent) => agent, + }))) .child( List::new() .child( @@ -1023,7 +1055,7 @@ impl Render for ConfigurationView { ) ) .child( - InstructionListItem::text_only("Paste your API key below and hit enter to start using the assistant") + InstructionListItem::text_only("Paste your API key below and hit enter to start using the agent") ) ) .child( diff --git a/crates/language_models/src/provider/bedrock.rs b/crates/language_models/src/provider/bedrock.rs index 6df96c5c566aac6f23af837491292cc89a56c74a..178c767950e640801ada7291cd8eb317fcba232c 100644 --- a/crates/language_models/src/provider/bedrock.rs +++ b/crates/language_models/src/provider/bedrock.rs @@ -150,7 +150,7 @@ impl State { let credentials_provider = ::global(cx); cx.spawn(async move |this, cx| { credentials_provider - .delete_credentials(AMAZON_AWS_URL, &cx) + .delete_credentials(AMAZON_AWS_URL, cx) .await .log_err(); this.update(cx, |this, cx| { @@ -174,7 +174,7 @@ impl State { AMAZON_AWS_URL, "Bearer", &serde_json::to_vec(&credentials)?, - &cx, + cx, ) .await?; this.update(cx, |this, cx| { @@ -206,7 +206,7 @@ impl State { (credentials, true) } else { let (_, credentials) = credentials_provider - .read_credentials(AMAZON_AWS_URL, &cx) + .read_credentials(AMAZON_AWS_URL, cx) .await? .ok_or_else(|| AuthenticateError::CredentialsNotFound)?; ( @@ -348,7 +348,12 @@ impl LanguageModelProvider for BedrockLanguageModelProvider { self.state.update(cx, |state, cx| state.authenticate(cx)) } - fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView { + fn configuration_view( + &self, + _target_agent: language_model::ConfigurationViewTargetAgent, + window: &mut Window, + cx: &mut App, + ) -> AnyView { cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx)) .into() } @@ -407,10 +412,10 @@ impl BedrockModel { .region(Region::new(region)) .timeout_config(TimeoutConfig::disabled()); - if let Some(endpoint_url) = endpoint { - if !endpoint_url.is_empty() { - config_builder = config_builder.endpoint_url(endpoint_url); - } + if let Some(endpoint_url) = endpoint + && !endpoint_url.is_empty() + { + config_builder = config_builder.endpoint_url(endpoint_url); } match auth_method { @@ -460,7 +465,7 @@ impl BedrockModel { Result>>, > { let Ok(runtime_client) = self - .get_or_init_client(&cx) + .get_or_init_client(cx) .cloned() .context("Bedrock client not initialized") else { @@ -723,11 +728,11 @@ pub fn into_bedrock( Role::Assistant => bedrock::BedrockRole::Assistant, Role::System => unreachable!("System role should never occur here"), }; - if let Some(last_message) = new_messages.last_mut() { - if last_message.role == bedrock_role { - last_message.content.extend(bedrock_message_content); - continue; - } + if let Some(last_message) = new_messages.last_mut() + && last_message.role == bedrock_role + { + last_message.content.extend(bedrock_message_content); + continue; } new_messages.push( BedrockMessage::builder() @@ -912,7 +917,7 @@ pub fn map_to_language_model_completion_events( Some(ContentBlockDelta::ReasoningContent(thinking)) => match thinking { ReasoningContentBlockDelta::Text(thoughts) => { Some(Ok(LanguageModelCompletionEvent::Thinking { - text: thoughts.clone(), + text: thoughts, signature: None, })) } @@ -963,7 +968,7 @@ pub fn map_to_language_model_completion_events( id: tool_use.id.into(), name: tool_use.name.into(), is_input_complete: true, - raw_input: 
tool_use.input_json.clone(), + raw_input: tool_use.input_json, input, }, )) @@ -1081,21 +1086,18 @@ impl ConfigurationView { .access_key_id_editor .read(cx) .text(cx) - .to_string() .trim() .to_string(); let secret_access_key = self .secret_access_key_editor .read(cx) .text(cx) - .to_string() .trim() .to_string(); let session_token = self .session_token_editor .read(cx) .text(cx) - .to_string() .trim() .to_string(); let session_token = if session_token.is_empty() { @@ -1103,13 +1105,7 @@ impl ConfigurationView { } else { Some(session_token) }; - let region = self - .region_editor - .read(cx) - .text(cx) - .to_string() - .trim() - .to_string(); + let region = self.region_editor.read(cx).text(cx).trim().to_string(); let region = if region.is_empty() { "us-east-1".to_string() } else { diff --git a/crates/language_models/src/provider/cloud.rs b/crates/language_models/src/provider/cloud.rs index 40dd12076113ade6cff3563795472482573faf16..b1b5ff3eb39695ea521dc986b39ffd2d0d194f99 100644 --- a/crates/language_models/src/provider/cloud.rs +++ b/crates/language_models/src/provider/cloud.rs @@ -140,7 +140,7 @@ impl State { Self { client: client.clone(), llm_api_token: LlmApiToken::default(), - user_store: user_store.clone(), + user_store, status, accept_terms_of_service_task: None, models: Vec::new(), @@ -193,7 +193,7 @@ impl State { fn authenticate(&self, cx: &mut Context) -> Task> { let client = self.client.clone(); cx.spawn(async move |state, cx| { - client.sign_in_with_optional_connect(true, &cx).await?; + client.sign_in_with_optional_connect(true, cx).await?; state.update(cx, |_, cx| cx.notify()) }) } @@ -270,7 +270,7 @@ impl State { if response.status().is_success() { let mut body = String::new(); response.body_mut().read_to_string(&mut body).await?; - return Ok(serde_json::from_str(&body)?); + Ok(serde_json::from_str(&body)?) 
} else { let mut body = String::new(); response.body_mut().read_to_string(&mut body).await?; @@ -307,7 +307,7 @@ impl CloudLanguageModelProvider { Self { client, - state: state.clone(), + state, _maintain_client_status: maintain_client_status, } } @@ -320,7 +320,7 @@ impl CloudLanguageModelProvider { Arc::new(CloudLanguageModel { id: LanguageModelId(SharedString::from(model.id.0.clone())), model, - llm_api_token: llm_api_token.clone(), + llm_api_token, client: self.client.clone(), request_limiter: RateLimiter::new(4), }) @@ -391,7 +391,12 @@ impl LanguageModelProvider for CloudLanguageModelProvider { Task::ready(Ok(())) } - fn configuration_view(&self, _: &mut Window, cx: &mut App) -> AnyView { + fn configuration_view( + &self, + _target_agent: language_model::ConfigurationViewTargetAgent, + _: &mut Window, + cx: &mut App, + ) -> AnyView { cx.new(|_| ConfigurationView::new(self.state.clone())) .into() } @@ -437,7 +442,7 @@ fn render_accept_terms( .style(ButtonStyle::Subtle) .icon(IconName::ArrowUpRight) .icon_color(Color::Muted) - .icon_size(IconSize::XSmall) + .icon_size(IconSize::Small) .when(thread_empty_state, |this| this.label_size(LabelSize::Small)) .on_click(move |_, _window, cx| cx.open_url("https://zed.dev/terms-of-service")); @@ -592,15 +597,13 @@ impl CloudLanguageModel { .headers() .get(SUBSCRIPTION_LIMIT_RESOURCE_HEADER_NAME) .and_then(|resource| resource.to_str().ok()) - { - if let Some(plan) = response + && let Some(plan) = response .headers() .get(CURRENT_PLAN_HEADER_NAME) .and_then(|plan| plan.to_str().ok()) .and_then(|plan| cloud_llm_client::Plan::from_str(plan).ok()) - { - return Err(anyhow!(ModelRequestLimitReachedError { plan })); - } + { + return Err(anyhow!(ModelRequestLimitReachedError { plan })); } } else if status == StatusCode::PAYMENT_REQUIRED { return Err(anyhow!(PaymentRequiredError)); @@ -657,29 +660,29 @@ where impl From for LanguageModelCompletionError { fn from(error: ApiError) -> Self { - if let Ok(cloud_error) = serde_json::from_str::(&error.body) { - if cloud_error.code.starts_with("upstream_http_") { - let status = if let Some(status) = cloud_error.upstream_status { - status - } else if cloud_error.code.ends_with("_error") { - error.status - } else { - // If there's a status code in the code string (e.g. "upstream_http_429") - // then use that; otherwise, see if the JSON contains a status code. - cloud_error - .code - .strip_prefix("upstream_http_") - .and_then(|code_str| code_str.parse::().ok()) - .and_then(|code| StatusCode::from_u16(code).ok()) - .unwrap_or(error.status) - }; + if let Ok(cloud_error) = serde_json::from_str::(&error.body) + && cloud_error.code.starts_with("upstream_http_") + { + let status = if let Some(status) = cloud_error.upstream_status { + status + } else if cloud_error.code.ends_with("_error") { + error.status + } else { + // If there's a status code in the code string (e.g. "upstream_http_429") + // then use that; otherwise, see if the JSON contains a status code. 
+ cloud_error + .code + .strip_prefix("upstream_http_") + .and_then(|code_str| code_str.parse::().ok()) + .and_then(|code| StatusCode::from_u16(code).ok()) + .unwrap_or(error.status) + }; - return LanguageModelCompletionError::UpstreamProviderError { - message: cloud_error.message, - status, - retry_after: cloud_error.retry_after.map(Duration::from_secs_f64), - }; - } + return LanguageModelCompletionError::UpstreamProviderError { + message: cloud_error.message, + status, + retry_after: cloud_error.retry_after.map(Duration::from_secs_f64), + }; } let retry_after = None; @@ -941,6 +944,8 @@ impl LanguageModel for CloudLanguageModel { request, model.id(), model.supports_parallel_tool_calls(), + model.supports_prompt_cache_key(), + None, None, ); let llm_api_token = self.llm_api_token.clone(); diff --git a/crates/language_models/src/provider/copilot_chat.rs b/crates/language_models/src/provider/copilot_chat.rs index 73f73a9a313c764d45adfd14910efd801a472f1c..eb12c0056f871a4d9eb053c51455081572868aef 100644 --- a/crates/language_models/src/provider/copilot_chat.rs +++ b/crates/language_models/src/provider/copilot_chat.rs @@ -176,7 +176,12 @@ impl LanguageModelProvider for CopilotChatLanguageModelProvider { Task::ready(Err(err.into())) } - fn configuration_view(&self, _: &mut Window, cx: &mut App) -> AnyView { + fn configuration_view( + &self, + _target_agent: language_model::ConfigurationViewTargetAgent, + _: &mut Window, + cx: &mut App, + ) -> AnyView { let state = self.state.clone(); cx.new(|cx| ConfigurationView::new(state, cx)).into() } diff --git a/crates/language_models/src/provider/deepseek.rs b/crates/language_models/src/provider/deepseek.rs index a568ef4034193b5b1078d2ec4907d18fb0762efa..8c7f8bcc351851ebb9cc90aaa9c9fa2eed59119e 100644 --- a/crates/language_models/src/provider/deepseek.rs +++ b/crates/language_models/src/provider/deepseek.rs @@ -77,7 +77,7 @@ impl State { .clone(); cx.spawn(async move |this, cx| { credentials_provider - .delete_credentials(&api_url, &cx) + .delete_credentials(&api_url, cx) .await .log_err(); this.update(cx, |this, cx| { @@ -96,7 +96,7 @@ impl State { .clone(); cx.spawn(async move |this, cx| { credentials_provider - .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx) + .write_credentials(&api_url, "Bearer", api_key.as_bytes(), cx) .await?; this.update(cx, |this, cx| { this.api_key = Some(api_key); @@ -120,7 +120,7 @@ impl State { (api_key, true) } else { let (_, api_key) = credentials_provider - .read_credentials(&api_url, &cx) + .read_credentials(&api_url, cx) .await? 
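
The ApiError conversion above recovers the upstream HTTP status in three steps: use an explicit upstream_status when the payload provides one, fall back to the response status for non-numeric codes such as upstream_http_error, and otherwise parse the numeric suffix of upstream_http_NNN. A standalone sketch of that precedence; the fallback argument stands in for the original response status:

    fn upstream_status(code: &str, explicit: Option<u16>, fallback: u16) -> u16 {
        if let Some(status) = explicit {
            return status; // the payload already told us the upstream status
        }
        if code.ends_with("_error") {
            return fallback; // e.g. "upstream_http_error": nothing numeric to parse
        }
        code.strip_prefix("upstream_http_")
            .and_then(|suffix| suffix.parse::<u16>().ok())
            .filter(|status| (100..=599).contains(status))
            .unwrap_or(fallback)
    }

    fn main() {
        assert_eq!(upstream_status("upstream_http_429", None, 500), 429);
        assert_eq!(upstream_status("upstream_http_error", None, 502), 502);
        assert_eq!(upstream_status("upstream_http_xyz", None, 500), 500);
        assert_eq!(upstream_status("upstream_http_429", Some(503), 500), 503);
    }

Carrying the real upstream status forward lets retry logic distinguish a 429 from the proxy's own 500-series responses.
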
.ok_or(AuthenticateError::CredentialsNotFound)?; ( @@ -229,7 +229,12 @@ impl LanguageModelProvider for DeepSeekLanguageModelProvider { self.state.update(cx, |state, cx| state.authenticate(cx)) } - fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView { + fn configuration_view( + &self, + _target_agent: language_model::ConfigurationViewTargetAgent, + window: &mut Window, + cx: &mut App, + ) -> AnyView { cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx)) .into() } diff --git a/crates/language_models/src/provider/google.rs b/crates/language_models/src/provider/google.rs index b287e8181a2ac5d04650d799a0cd9b23d51749c2..566620675ee759516d767804bff837ec953d8369 100644 --- a/crates/language_models/src/provider/google.rs +++ b/crates/language_models/src/provider/google.rs @@ -12,9 +12,9 @@ use gpui::{ }; use http_client::HttpClient; use language_model::{ - AuthenticateError, LanguageModelCompletionError, LanguageModelCompletionEvent, - LanguageModelToolChoice, LanguageModelToolSchemaFormat, LanguageModelToolUse, - LanguageModelToolUseId, MessageContent, StopReason, + AuthenticateError, ConfigurationViewTargetAgent, LanguageModelCompletionError, + LanguageModelCompletionEvent, LanguageModelToolChoice, LanguageModelToolSchemaFormat, + LanguageModelToolUse, LanguageModelToolUseId, MessageContent, StopReason, }; use language_model::{ LanguageModel, LanguageModelId, LanguageModelName, LanguageModelProvider, @@ -37,6 +37,8 @@ use util::ResultExt; use crate::AllLanguageModelSettings; use crate::ui::InstructionListItem; +use super::anthropic::ApiKey; + const PROVIDER_ID: LanguageModelProviderId = language_model::GOOGLE_PROVIDER_ID; const PROVIDER_NAME: LanguageModelProviderName = language_model::GOOGLE_PROVIDER_NAME; @@ -110,7 +112,7 @@ impl State { .clone(); cx.spawn(async move |this, cx| { credentials_provider - .delete_credentials(&api_url, &cx) + .delete_credentials(&api_url, cx) .await .log_err(); this.update(cx, |this, cx| { @@ -129,7 +131,7 @@ impl State { .clone(); cx.spawn(async move |this, cx| { credentials_provider - .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx) + .write_credentials(&api_url, "Bearer", api_key.as_bytes(), cx) .await?; this.update(cx, |this, cx| { this.api_key = Some(api_key); @@ -156,7 +158,7 @@ impl State { (api_key, true) } else { let (_, api_key) = credentials_provider - .read_credentials(&api_url, &cx) + .read_credentials(&api_url, cx) .await? .ok_or(AuthenticateError::CredentialsNotFound)?; ( @@ -198,6 +200,33 @@ impl GoogleLanguageModelProvider { request_limiter: RateLimiter::new(4), }) } + + pub fn api_key(cx: &mut App) -> Task> { + let credentials_provider = ::global(cx); + let api_url = AllLanguageModelSettings::get_global(cx) + .google + .api_url + .clone(); + + if let Ok(key) = std::env::var(GEMINI_API_KEY_VAR) { + Task::ready(Ok(ApiKey { + key, + from_env: true, + })) + } else { + cx.spawn(async move |cx| { + let (_, api_key) = credentials_provider + .read_credentials(&api_url, cx) + .await? 
+ .ok_or(AuthenticateError::CredentialsNotFound)?; + + Ok(ApiKey { + key: String::from_utf8(api_key).context("invalid {PROVIDER_NAME} API key")?, + from_env: false, + }) + }) + } + } } impl LanguageModelProviderState for GoogleLanguageModelProvider { @@ -277,8 +306,13 @@ impl LanguageModelProvider for GoogleLanguageModelProvider { self.state.update(cx, |state, cx| state.authenticate(cx)) } - fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView { - cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx)) + fn configuration_view( + &self, + target_agent: language_model::ConfigurationViewTargetAgent, + window: &mut Window, + cx: &mut App, + ) -> AnyView { + cx.new(|cx| ConfigurationView::new(self.state.clone(), target_agent, window, cx)) .into() } @@ -382,7 +416,7 @@ impl LanguageModel for GoogleLanguageModel { cx: &App, ) -> BoxFuture<'static, Result> { let model_id = self.model.request_id().to_string(); - let request = into_google(request, model_id.clone(), self.model.mode()); + let request = into_google(request, model_id, self.model.mode()); let http_client = self.http_client.clone(); let api_key = self.state.read(cx).api_key.clone(); @@ -525,7 +559,7 @@ pub fn into_google( let system_instructions = if request .messages .first() - .map_or(false, |msg| matches!(msg.role, Role::System)) + .is_some_and(|msg| matches!(msg.role, Role::System)) { let message = request.messages.remove(0); Some(SystemInstruction { @@ -572,7 +606,7 @@ pub fn into_google( top_k: None, }), safety_settings: None, - tools: (request.tools.len() > 0).then(|| { + tools: (!request.tools.is_empty()).then(|| { vec![google_ai::Tool { function_declarations: request .tools @@ -771,11 +805,17 @@ fn convert_usage(usage: &UsageMetadata) -> language_model::TokenUsage { struct ConfigurationView { api_key_editor: Entity, state: gpui::Entity, + target_agent: language_model::ConfigurationViewTargetAgent, load_credentials_task: Option>, } impl ConfigurationView { - fn new(state: gpui::Entity, window: &mut Window, cx: &mut Context) -> Self { + fn new( + state: gpui::Entity, + target_agent: language_model::ConfigurationViewTargetAgent, + window: &mut Window, + cx: &mut Context, + ) -> Self { cx.observe(&state, |_, _, cx| { cx.notify(); }) @@ -805,6 +845,7 @@ impl ConfigurationView { editor.set_placeholder_text("AIzaSy...", cx); editor }), + target_agent, state, load_credentials_task, } @@ -880,7 +921,10 @@ impl Render for ConfigurationView { v_flex() .size_full() .on_action(cx.listener(Self::save_api_key)) - .child(Label::new("To use Zed's agent with Google AI, you need to add an API key. Follow these steps:")) + .child(Label::new(format!("To use {}, you need to add an API key. 
Follow these steps:", match self.target_agent { + ConfigurationViewTargetAgent::ZedAgent => "Zed's agent with Google AI", + ConfigurationViewTargetAgent::Other(agent) => agent, + }))) .child( List::new() .child(InstructionListItem::new( diff --git a/crates/language_models/src/provider/lmstudio.rs b/crates/language_models/src/provider/lmstudio.rs index 9792b4f27b9990c1c9c64fbda971f7e45490fc49..80b28a396b958ab20de3faa0a0f6919c57011e5c 100644 --- a/crates/language_models/src/provider/lmstudio.rs +++ b/crates/language_models/src/provider/lmstudio.rs @@ -210,7 +210,7 @@ impl LanguageModelProvider for LmStudioLanguageModelProvider { .map(|model| { Arc::new(LmStudioLanguageModel { id: LanguageModelId::from(model.name.clone()), - model: model.clone(), + model, http_client: self.http_client.clone(), request_limiter: RateLimiter::new(4), }) as Arc @@ -226,7 +226,12 @@ impl LanguageModelProvider for LmStudioLanguageModelProvider { self.state.update(cx, |state, cx| state.authenticate(cx)) } - fn configuration_view(&self, _window: &mut Window, cx: &mut App) -> AnyView { + fn configuration_view( + &self, + _target_agent: language_model::ConfigurationViewTargetAgent, + _window: &mut Window, + cx: &mut App, + ) -> AnyView { let state = self.state.clone(); cx.new(|cx| ConfigurationView::new(state, cx)).into() } @@ -690,7 +695,7 @@ impl Render for ConfigurationView { Button::new("lmstudio-site", "LM Studio") .style(ButtonStyle::Subtle) .icon(IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) + .icon_size(IconSize::Small) .icon_color(Color::Muted) .on_click(move |_, _window, cx| { cx.open_url(LMSTUDIO_SITE) @@ -705,7 +710,7 @@ impl Render for ConfigurationView { ) .style(ButtonStyle::Subtle) .icon(IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) + .icon_size(IconSize::Small) .icon_color(Color::Muted) .on_click(move |_, _window, cx| { cx.open_url(LMSTUDIO_DOWNLOAD_URL) @@ -718,7 +723,7 @@ impl Render for ConfigurationView { Button::new("view-models", "Model Catalog") .style(ButtonStyle::Subtle) .icon(IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) + .icon_size(IconSize::Small) .icon_color(Color::Muted) .on_click(move |_, _window, cx| { cx.open_url(LMSTUDIO_CATALOG_URL) @@ -744,7 +749,7 @@ impl Render for ConfigurationView { Button::new("retry_lmstudio_models", "Connect") .icon_position(IconPosition::Start) .icon_size(IconSize::XSmall) - .icon(IconName::PlayOutlined) + .icon(IconName::PlayFilled) .on_click(cx.listener(move |this, _, _window, cx| { this.retry_connection(cx) })), diff --git a/crates/language_models/src/provider/mistral.rs b/crates/language_models/src/provider/mistral.rs index 02e53cb99a846ab24ffdcd2f6c087b436d4f3789..3f8c2e2a678d019aad1ff90688b145ffe6c740dd 100644 --- a/crates/language_models/src/provider/mistral.rs +++ b/crates/language_models/src/provider/mistral.rs @@ -47,6 +47,7 @@ pub struct AvailableModel { pub max_completion_tokens: Option, pub supports_tools: Option, pub supports_images: Option, + pub supports_thinking: Option, } pub struct MistralLanguageModelProvider { @@ -75,7 +76,7 @@ impl State { .clone(); cx.spawn(async move |this, cx| { credentials_provider - .delete_credentials(&api_url, &cx) + .delete_credentials(&api_url, cx) .await .log_err(); this.update(cx, |this, cx| { @@ -94,7 +95,7 @@ impl State { .clone(); cx.spawn(async move |this, cx| { credentials_provider - .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx) + .write_credentials(&api_url, "Bearer", api_key.as_bytes(), cx) .await?; this.update(cx, |this, cx| { this.api_key = Some(api_key); 
@@ -118,7 +119,7 @@ impl State { (api_key, true) } else { let (_, api_key) = credentials_provider - .read_credentials(&api_url, &cx) + .read_credentials(&api_url, cx) .await? .ok_or(AuthenticateError::CredentialsNotFound)?; ( @@ -215,6 +216,7 @@ impl LanguageModelProvider for MistralLanguageModelProvider { max_completion_tokens: model.max_completion_tokens, supports_tools: model.supports_tools, supports_images: model.supports_images, + supports_thinking: model.supports_thinking, }, ); } @@ -241,7 +243,12 @@ impl LanguageModelProvider for MistralLanguageModelProvider { self.state.update(cx, |state, cx| state.authenticate(cx)) } - fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView { + fn configuration_view( + &self, + _target_agent: language_model::ConfigurationViewTargetAgent, + window: &mut Window, + cx: &mut App, + ) -> AnyView { cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx)) .into() } @@ -366,11 +373,7 @@ impl LanguageModel for MistralLanguageModel { LanguageModelCompletionError, >, > { - let request = into_mistral( - request, - self.model.id().to_string(), - self.max_output_tokens(), - ); + let request = into_mistral(request, self.model.clone(), self.max_output_tokens()); let stream = self.stream_completion(request, cx); async move { @@ -384,7 +387,7 @@ impl LanguageModel for MistralLanguageModel { pub fn into_mistral( request: LanguageModelRequest, - model: String, + model: mistral::Model, max_output_tokens: Option, ) -> mistral::Request { let stream = true; @@ -401,13 +404,20 @@ pub fn into_mistral( .push_part(mistral::MessagePart::Text { text: text.clone() }); } MessageContent::Image(image_content) => { - message_content.push_part(mistral::MessagePart::ImageUrl { - image_url: image_content.to_base64_url(), - }); + if model.supports_images() { + message_content.push_part(mistral::MessagePart::ImageUrl { + image_url: image_content.to_base64_url(), + }); + } } MessageContent::Thinking { text, .. } => { - message_content - .push_part(mistral::MessagePart::Text { text: text.clone() }); + if model.supports_thinking() { + message_content.push_part(mistral::MessagePart::Thinking { + thinking: vec![mistral::ThinkingPart::Text { + text: text.clone(), + }], + }); + } } MessageContent::RedactedThinking(_) => {} MessageContent::ToolUse(_) => { @@ -437,12 +447,28 @@ pub fn into_mistral( Role::Assistant => { for content in &message.content { match content { - MessageContent::Text(text) | MessageContent::Thinking { text, .. } => { + MessageContent::Text(text) => { messages.push(mistral::RequestMessage::Assistant { - content: Some(text.clone()), + content: Some(mistral::MessageContent::Plain { + content: text.clone(), + }), tool_calls: Vec::new(), }); } + MessageContent::Thinking { text, .. } => { + if model.supports_thinking() { + messages.push(mistral::RequestMessage::Assistant { + content: Some(mistral::MessageContent::Multipart { + content: vec![mistral::MessagePart::Thinking { + thinking: vec![mistral::ThinkingPart::Text { + text: text.clone(), + }], + }], + }), + tool_calls: Vec::new(), + }); + } + } MessageContent::RedactedThinking(_) => {} MessageContent::Image(_) => {} MessageContent::ToolUse(tool_use) => { @@ -477,11 +503,26 @@ pub fn into_mistral( Role::System => { for content in &message.content { match content { - MessageContent::Text(text) | MessageContent::Thinking { text, .. 
} => { + MessageContent::Text(text) => { messages.push(mistral::RequestMessage::System { - content: text.clone(), + content: mistral::MessageContent::Plain { + content: text.clone(), + }, }); } + MessageContent::Thinking { text, .. } => { + if model.supports_thinking() { + messages.push(mistral::RequestMessage::System { + content: mistral::MessageContent::Multipart { + content: vec![mistral::MessagePart::Thinking { + thinking: vec![mistral::ThinkingPart::Text { + text: text.clone(), + }], + }], + }, + }); + } + } MessageContent::RedactedThinking(_) => {} MessageContent::Image(_) | MessageContent::ToolUse(_) @@ -494,37 +535,8 @@ pub fn into_mistral( } } - // The Mistral API requires that tool messages be followed by assistant messages, - // not user messages. When we have a tool->user sequence in the conversation, - // we need to insert a placeholder assistant message to maintain proper conversation - // flow and prevent API errors. This is a Mistral-specific requirement that differs - // from other language model APIs. - let messages = { - let mut fixed_messages = Vec::with_capacity(messages.len()); - let mut messages_iter = messages.into_iter().peekable(); - - while let Some(message) = messages_iter.next() { - let is_tool_message = matches!(message, mistral::RequestMessage::Tool { .. }); - fixed_messages.push(message); - - // Insert assistant message between tool and user messages - if is_tool_message { - if let Some(next_msg) = messages_iter.peek() { - if matches!(next_msg, mistral::RequestMessage::User { .. }) { - fixed_messages.push(mistral::RequestMessage::Assistant { - content: Some(" ".to_string()), - tool_calls: Vec::new(), - }); - } - } - } - } - - fixed_messages - }; - mistral::Request { - model, + model: model.id().to_string(), messages, stream, max_tokens: max_output_tokens, @@ -595,8 +607,38 @@ impl MistralEventMapper { }; let mut events = Vec::new(); - if let Some(content) = choice.delta.content.clone() { - events.push(Ok(LanguageModelCompletionEvent::Text(content))); + if let Some(content) = choice.delta.content.as_ref() { + match content { + mistral::MessageContentDelta::Text(text) => { + events.push(Ok(LanguageModelCompletionEvent::Text(text.clone()))); + } + mistral::MessageContentDelta::Parts(parts) => { + for part in parts { + match part { + mistral::MessagePart::Text { text } => { + events.push(Ok(LanguageModelCompletionEvent::Text(text.clone()))); + } + mistral::MessagePart::Thinking { thinking } => { + for tp in thinking.iter().cloned() { + match tp { + mistral::ThinkingPart::Text { text } => { + events.push(Ok( + LanguageModelCompletionEvent::Thinking { + text, + signature: None, + }, + )); + } + } + } + } + mistral::MessagePart::ImageUrl { .. } => { + // We currently don't emit a separate event for images in responses. 
+ } + } + } + } + } } if let Some(tool_calls) = choice.delta.tool_calls.as_ref() { @@ -908,7 +950,7 @@ mod tests { thinking_allowed: true, }; - let mistral_request = into_mistral(request, "mistral-small-latest".into(), None); + let mistral_request = into_mistral(request, mistral::Model::MistralSmallLatest, None); assert_eq!(mistral_request.model, "mistral-small-latest"); assert_eq!(mistral_request.temperature, Some(0.5)); @@ -941,7 +983,7 @@ mod tests { thinking_allowed: true, }; - let mistral_request = into_mistral(request, "pixtral-12b-latest".into(), None); + let mistral_request = into_mistral(request, mistral::Model::Pixtral12BLatest, None); assert_eq!(mistral_request.messages.len(), 1); assert!(matches!( diff --git a/crates/language_models/src/provider/ollama.rs b/crates/language_models/src/provider/ollama.rs index c845c97b09c5f7ca7205a43b889065de06c39550..3f2d47fba35959e6c26205193dddba45c2df25cc 100644 --- a/crates/language_models/src/provider/ollama.rs +++ b/crates/language_models/src/provider/ollama.rs @@ -237,7 +237,7 @@ impl LanguageModelProvider for OllamaLanguageModelProvider { .map(|model| { Arc::new(OllamaLanguageModel { id: LanguageModelId::from(model.name.clone()), - model: model.clone(), + model, http_client: self.http_client.clone(), request_limiter: RateLimiter::new(4), }) as Arc @@ -255,7 +255,12 @@ impl LanguageModelProvider for OllamaLanguageModelProvider { self.state.update(cx, |state, cx| state.authenticate(cx)) } - fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView { + fn configuration_view( + &self, + _target_agent: language_model::ConfigurationViewTargetAgent, + window: &mut Window, + cx: &mut App, + ) -> AnyView { let state = self.state.clone(); cx.new(|cx| ConfigurationView::new(state, window, cx)) .into() @@ -608,7 +613,7 @@ impl Render for ConfigurationView { Button::new("ollama-site", "Ollama") .style(ButtonStyle::Subtle) .icon(IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) + .icon_size(IconSize::Small) .icon_color(Color::Muted) .on_click(move |_, _, cx| cx.open_url(OLLAMA_SITE)) .into_any_element(), @@ -621,7 +626,7 @@ impl Render for ConfigurationView { ) .style(ButtonStyle::Subtle) .icon(IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) + .icon_size(IconSize::Small) .icon_color(Color::Muted) .on_click(move |_, _, cx| { cx.open_url(OLLAMA_DOWNLOAD_URL) @@ -634,7 +639,7 @@ impl Render for ConfigurationView { Button::new("view-models", "View All Models") .style(ButtonStyle::Subtle) .icon(IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) + .icon_size(IconSize::Small) .icon_color(Color::Muted) .on_click(move |_, _, cx| cx.open_url(OLLAMA_LIBRARY_URL)), ), @@ -658,7 +663,7 @@ impl Render for ConfigurationView { Button::new("retry_ollama_models", "Connect") .icon_position(IconPosition::Start) .icon_size(IconSize::XSmall) - .icon(IconName::PlayOutlined) + .icon(IconName::PlayFilled) .on_click(cx.listener(move |this, _, _, cx| { this.retry_connection(cx) })), diff --git a/crates/language_models/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs index ee74562687b5de4258328a1f7ffae082d5dc4931..4348fd42110b2554de801b812a7b001dc49ad06e 100644 --- a/crates/language_models/src/provider/open_ai.rs +++ b/crates/language_models/src/provider/open_ai.rs @@ -14,7 +14,7 @@ use language_model::{ RateLimiter, Role, StopReason, TokenUsage, }; use menu; -use open_ai::{ImageUrl, Model, ResponseStreamEvent, stream_completion}; +use open_ai::{ImageUrl, Model, ReasoningEffort, ResponseStreamEvent, stream_completion}; use 
schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsStore}; @@ -45,6 +45,7 @@ pub struct AvailableModel { pub max_tokens: u64, pub max_output_tokens: Option, pub max_completion_tokens: Option, + pub reasoning_effort: Option, } pub struct OpenAiLanguageModelProvider { @@ -74,7 +75,7 @@ impl State { .clone(); cx.spawn(async move |this, cx| { credentials_provider - .delete_credentials(&api_url, &cx) + .delete_credentials(&api_url, cx) .await .log_err(); this.update(cx, |this, cx| { @@ -93,7 +94,7 @@ impl State { .clone(); cx.spawn(async move |this, cx| { credentials_provider - .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx) + .write_credentials(&api_url, "Bearer", api_key.as_bytes(), cx) .await .log_err(); this.update(cx, |this, cx| { @@ -118,7 +119,7 @@ impl State { (api_key, true) } else { let (_, api_key) = credentials_provider - .read_credentials(&api_url, &cx) + .read_credentials(&api_url, cx) .await? .ok_or(AuthenticateError::CredentialsNotFound)?; ( @@ -213,6 +214,7 @@ impl LanguageModelProvider for OpenAiLanguageModelProvider { max_tokens: model.max_tokens, max_output_tokens: model.max_output_tokens, max_completion_tokens: model.max_completion_tokens, + reasoning_effort: model.reasoning_effort.clone(), }, ); } @@ -231,7 +233,12 @@ impl LanguageModelProvider for OpenAiLanguageModelProvider { self.state.update(cx, |state, cx| state.authenticate(cx)) } - fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView { + fn configuration_view( + &self, + _target_agent: language_model::ConfigurationViewTargetAgent, + window: &mut Window, + cx: &mut App, + ) -> AnyView { cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx)) .into() } @@ -301,7 +308,25 @@ impl LanguageModel for OpenAiLanguageModel { } fn supports_images(&self) -> bool { - false + use open_ai::Model; + match &self.model { + Model::FourOmni + | Model::FourOmniMini + | Model::FourPointOne + | Model::FourPointOneMini + | Model::FourPointOneNano + | Model::Five + | Model::FiveMini + | Model::FiveNano + | Model::O1 + | Model::O3 + | Model::O4Mini => true, + Model::ThreePointFiveTurbo + | Model::Four + | Model::FourTurbo + | Model::O3Mini + | Model::Custom { .. } => false, + } } fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool { @@ -350,7 +375,9 @@ impl LanguageModel for OpenAiLanguageModel { request, self.model.id(), self.model.supports_parallel_tool_calls(), + self.model.supports_prompt_cache_key(), self.max_output_tokens(), + self.model.reasoning_effort(), ); let completions = self.stream_completion(request, cx); async move { @@ -365,7 +392,9 @@ pub fn into_open_ai( request: LanguageModelRequest, model_id: &str, supports_parallel_tool_calls: bool, + supports_prompt_cache_key: bool, max_output_tokens: Option, + reasoning_effort: Option, ) -> open_ai::Request { let stream = !model_id.starts_with("o1-"); @@ -375,7 +404,7 @@ pub fn into_open_ai( match content { MessageContent::Text(text) | MessageContent::Thinking { text, .. 
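
into_open_ai now threads two optional knobs: the thread id is reused as prompt_cache_key only when the model supports it, and reasoning_effort is passed through when configured. A serde-based sketch (serde and serde_json assumed as dependencies) showing how these fields simply stay out of the serialized JSON when None; the model names are examples only, and reasoning_effort is modeled as a plain string here rather than the real enum:

    use serde::Serialize;

    #[derive(Serialize)]
    struct Request {
        model: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        prompt_cache_key: Option<String>,
        #[serde(skip_serializing_if = "Option::is_none")]
        reasoning_effort: Option<String>,
    }

    fn build_request(
        model: &str,
        supports_prompt_cache_key: bool,
        thread_id: Option<String>,
        reasoning_effort: Option<String>,
    ) -> Request {
        Request {
            model: model.to_string(),
            // Reuse the conversation/thread id as the cache key only when supported.
            prompt_cache_key: if supports_prompt_cache_key { thread_id } else { None },
            reasoning_effort,
        }
    }

    fn main() {
        let with = build_request("gpt-5-mini", true, Some("thread-123".into()), Some("low".into()));
        let without = build_request("gpt-4.1", false, Some("thread-123".into()), None);
        println!("{}", serde_json::to_string_pretty(&with).unwrap());
        println!("{}", serde_json::to_string_pretty(&without).unwrap());
    }
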
} => { add_message_content_part( - open_ai::MessagePart::Text { text: text }, + open_ai::MessagePart::Text { text }, message.role, &mut messages, ) @@ -455,6 +484,11 @@ pub fn into_open_ai( } else { None }, + prompt_cache_key: if supports_prompt_cache_key { + request.thread_id + } else { + None + }, tools: request .tools .into_iter() @@ -471,6 +505,7 @@ pub fn into_open_ai( LanguageModelToolChoice::Any => open_ai::ToolChoice::Required, LanguageModelToolChoice::None => open_ai::ToolChoice::None, }), + reasoning_effort, } } @@ -869,7 +904,7 @@ impl Render for ConfigurationView { .child( Button::new("docs", "Learn More") .icon(IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) + .icon_size(IconSize::Small) .icon_color(Color::Muted) .on_click(move |_, _window, cx| { cx.open_url("https://zed.dev/docs/ai/llm-providers#openai-api-compatible") diff --git a/crates/language_models/src/provider/open_ai_compatible.rs b/crates/language_models/src/provider/open_ai_compatible.rs index 38bd7cee06db915cc3d6cc4d8a39309fab879b44..55df534cc9416149ca5574e16f0230f1c8160220 100644 --- a/crates/language_models/src/provider/open_ai_compatible.rs +++ b/crates/language_models/src/provider/open_ai_compatible.rs @@ -38,6 +38,27 @@ pub struct AvailableModel { pub max_tokens: u64, pub max_output_tokens: Option, pub max_completion_tokens: Option, + #[serde(default)] + pub capabilities: ModelCapabilities, +} + +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)] +pub struct ModelCapabilities { + pub tools: bool, + pub images: bool, + pub parallel_tool_calls: bool, + pub prompt_cache_key: bool, +} + +impl Default for ModelCapabilities { + fn default() -> Self { + Self { + tools: true, + images: false, + parallel_tool_calls: false, + prompt_cache_key: false, + } + } } pub struct OpenAiCompatibleLanguageModelProvider { @@ -66,7 +87,7 @@ impl State { let api_url = self.settings.api_url.clone(); cx.spawn(async move |this, cx| { credentials_provider - .delete_credentials(&api_url, &cx) + .delete_credentials(&api_url, cx) .await .log_err(); this.update(cx, |this, cx| { @@ -82,7 +103,7 @@ impl State { let api_url = self.settings.api_url.clone(); cx.spawn(async move |this, cx| { credentials_provider - .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx) + .write_credentials(&api_url, "Bearer", api_key.as_bytes(), cx) .await .log_err(); this.update(cx, |this, cx| { @@ -105,7 +126,7 @@ impl State { (api_key, true) } else { let (_, api_key) = credentials_provider - .read_credentials(&api_url, &cx) + .read_credentials(&api_url, cx) .await? 
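
The new ModelCapabilities block for OpenAI-compatible endpoints is optional in settings: #[serde(default)] plus a hand-written Default (tools on, everything else off) keeps existing configurations valid while letting users opt in per model. A self-contained sketch of that settings shape, assuming serde and serde_json; the surrounding AvailableModel here is trimmed down to two fields:

    use serde::Deserialize;

    #[derive(Debug, Deserialize)]
    struct AvailableModel {
        name: String,
        max_tokens: u64,
        #[serde(default)]
        capabilities: ModelCapabilities,
    }

    #[derive(Debug, Deserialize)]
    #[serde(default)]
    struct ModelCapabilities {
        tools: bool,
        images: bool,
        parallel_tool_calls: bool,
        prompt_cache_key: bool,
    }

    impl Default for ModelCapabilities {
        fn default() -> Self {
            // Tools on, everything else off: a conservative guess for unknown endpoints.
            Self { tools: true, images: false, parallel_tool_calls: false, prompt_cache_key: false }
        }
    }

    fn main() {
        let legacy = r#"{ "name": "my-model", "max_tokens": 8192 }"#;
        let explicit = r#"{ "name": "my-model", "max_tokens": 8192,
                            "capabilities": { "tools": true, "images": true,
                                              "parallel_tool_calls": false, "prompt_cache_key": false } }"#;

        let a: AvailableModel = serde_json::from_str(legacy).unwrap();
        let b: AvailableModel = serde_json::from_str(explicit).unwrap();
        assert!(a.capabilities.tools && !a.capabilities.images); // defaults applied
        assert!(b.capabilities.images);
        println!("{a:?}\n{b:?}");
    }
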
.ok_or(AuthenticateError::CredentialsNotFound)?; ( @@ -222,7 +243,12 @@ impl LanguageModelProvider for OpenAiCompatibleLanguageModelProvider { self.state.update(cx, |state, cx| state.authenticate(cx)) } - fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView { + fn configuration_view( + &self, + _target_agent: language_model::ConfigurationViewTargetAgent, + window: &mut Window, + cx: &mut App, + ) -> AnyView { cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx)) .into() } @@ -293,17 +319,17 @@ impl LanguageModel for OpenAiCompatibleLanguageModel { } fn supports_tools(&self) -> bool { - true + self.model.capabilities.tools } fn supports_images(&self) -> bool { - false + self.model.capabilities.images } fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool { match choice { - LanguageModelToolChoice::Auto => true, - LanguageModelToolChoice::Any => true, + LanguageModelToolChoice::Auto => self.model.capabilities.tools, + LanguageModelToolChoice::Any => self.model.capabilities.tools, LanguageModelToolChoice::None => true, } } @@ -355,7 +381,14 @@ impl LanguageModel for OpenAiCompatibleLanguageModel { LanguageModelCompletionError, >, > { - let request = into_open_ai(request, &self.model.name, true, self.max_output_tokens()); + let request = into_open_ai( + request, + &self.model.name, + self.model.capabilities.parallel_tool_calls, + self.model.capabilities.prompt_cache_key, + self.max_output_tokens(), + None, + ); let completions = self.stream_completion(request, cx); async move { let mapper = OpenAiEventMapper::new(); diff --git a/crates/language_models/src/provider/open_router.rs b/crates/language_models/src/provider/open_router.rs index 3a492086f16e1f9b53a196b7bb2e9817a3cac0e7..8f2abfce35852c617ddee22de18432908660fe95 100644 --- a/crates/language_models/src/provider/open_router.rs +++ b/crates/language_models/src/provider/open_router.rs @@ -112,7 +112,7 @@ impl State { .clone(); cx.spawn(async move |this, cx| { credentials_provider - .delete_credentials(&api_url, &cx) + .delete_credentials(&api_url, cx) .await .log_err(); this.update(cx, |this, cx| { @@ -131,7 +131,7 @@ impl State { .clone(); cx.spawn(async move |this, cx| { credentials_provider - .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx) + .write_credentials(&api_url, "Bearer", api_key.as_bytes(), cx) .await .log_err(); this.update(cx, |this, cx| { @@ -157,7 +157,7 @@ impl State { (api_key, true) } else { let (_, api_key) = credentials_provider - .read_credentials(&api_url, &cx) + .read_credentials(&api_url, cx) .await? 
.ok_or(AuthenticateError::CredentialsNotFound)?; ( @@ -306,7 +306,12 @@ impl LanguageModelProvider for OpenRouterLanguageModelProvider { self.state.update(cx, |state, cx| state.authenticate(cx)) } - fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView { + fn configuration_view( + &self, + _target_agent: language_model::ConfigurationViewTargetAgent, + window: &mut Window, + cx: &mut App, + ) -> AnyView { cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx)) .into() } diff --git a/crates/language_models/src/provider/vercel.rs b/crates/language_models/src/provider/vercel.rs index 037ce467d03eb00a3e1ed272a6d2de80bb51e200..84f3175d1e5493fd55cafd2ea9c4a0604d2a97b4 100644 --- a/crates/language_models/src/provider/vercel.rs +++ b/crates/language_models/src/provider/vercel.rs @@ -71,7 +71,7 @@ impl State { }; cx.spawn(async move |this, cx| { credentials_provider - .delete_credentials(&api_url, &cx) + .delete_credentials(&api_url, cx) .await .log_err(); this.update(cx, |this, cx| { @@ -92,7 +92,7 @@ impl State { }; cx.spawn(async move |this, cx| { credentials_provider - .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx) + .write_credentials(&api_url, "Bearer", api_key.as_bytes(), cx) .await .log_err(); this.update(cx, |this, cx| { @@ -119,7 +119,7 @@ impl State { (api_key, true) } else { let (_, api_key) = credentials_provider - .read_credentials(&api_url, &cx) + .read_credentials(&api_url, cx) .await? .ok_or(AuthenticateError::CredentialsNotFound)?; ( @@ -230,7 +230,12 @@ impl LanguageModelProvider for VercelLanguageModelProvider { self.state.update(cx, |state, cx| state.authenticate(cx)) } - fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView { + fn configuration_view( + &self, + _target_agent: language_model::ConfigurationViewTargetAgent, + window: &mut Window, + cx: &mut App, + ) -> AnyView { cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx)) .into() } @@ -355,7 +360,9 @@ impl LanguageModel for VercelLanguageModel { request, self.model.id(), self.model.supports_parallel_tool_calls(), + self.model.supports_prompt_cache_key(), self.max_output_tokens(), + None, ); let completions = self.stream_completion(request, cx); async move { diff --git a/crates/language_models/src/provider/x_ai.rs b/crates/language_models/src/provider/x_ai.rs index 5f6034571b54fd30baa6769881f5d27bbcaf162f..b37a55e19f389bcf7e5ccd09b52e2b3f6b7ff094 100644 --- a/crates/language_models/src/provider/x_ai.rs +++ b/crates/language_models/src/provider/x_ai.rs @@ -71,7 +71,7 @@ impl State { }; cx.spawn(async move |this, cx| { credentials_provider - .delete_credentials(&api_url, &cx) + .delete_credentials(&api_url, cx) .await .log_err(); this.update(cx, |this, cx| { @@ -92,7 +92,7 @@ impl State { }; cx.spawn(async move |this, cx| { credentials_provider - .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx) + .write_credentials(&api_url, "Bearer", api_key.as_bytes(), cx) .await .log_err(); this.update(cx, |this, cx| { @@ -119,7 +119,7 @@ impl State { (api_key, true) } else { let (_, api_key) = credentials_provider - .read_credentials(&api_url, &cx) + .read_credentials(&api_url, cx) .await? 
.ok_or(AuthenticateError::CredentialsNotFound)?; ( @@ -230,7 +230,12 @@ impl LanguageModelProvider for XAiLanguageModelProvider { self.state.update(cx, |state, cx| state.authenticate(cx)) } - fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView { + fn configuration_view( + &self, + _target_agent: language_model::ConfigurationViewTargetAgent, + window: &mut Window, + cx: &mut App, + ) -> AnyView { cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx)) .into() } @@ -359,7 +364,9 @@ impl LanguageModel for XAiLanguageModel { request, self.model.id(), self.model.supports_parallel_tool_calls(), + self.model.supports_prompt_cache_key(), self.max_output_tokens(), + None, ); let completions = self.stream_completion(request, cx); async move { diff --git a/crates/language_models/src/ui/instruction_list_item.rs b/crates/language_models/src/ui/instruction_list_item.rs index 794a85b400aca3d7dadd201ac76a3f07fd0a97f0..bdb5fbe242ee902dc98a37addfaa0f103ef9ad20 100644 --- a/crates/language_models/src/ui/instruction_list_item.rs +++ b/crates/language_models/src/ui/instruction_list_item.rs @@ -37,7 +37,7 @@ impl IntoElement for InstructionListItem { let item_content = if let (Some(button_label), Some(button_link)) = (self.button_label, self.button_link) { - let link = button_link.clone(); + let link = button_link; let unique_id = SharedString::from(format!("{}-button", self.label)); h_flex() @@ -47,7 +47,7 @@ impl IntoElement for InstructionListItem { Button::new(unique_id, button_label) .style(ButtonStyle::Subtle) .icon(IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) + .icon_size(IconSize::Small) .icon_color(Color::Muted) .on_click(move |_, _window, cx| cx.open_url(&link)), ) diff --git a/crates/language_selector/src/active_buffer_language.rs b/crates/language_selector/src/active_buffer_language.rs index c5c5eceab54f2c34f4b1e2aae1b04f85fc5d9ab6..56924c4cd2d54c64436a5ccaa7dabfe4c53ff0ec 100644 --- a/crates/language_selector/src/active_buffer_language.rs +++ b/crates/language_selector/src/active_buffer_language.rs @@ -28,10 +28,10 @@ impl ActiveBufferLanguage { self.active_language = Some(None); let editor = editor.read(cx); - if let Some((_, buffer, _)) = editor.active_excerpt(cx) { - if let Some(language) = buffer.read(cx).language() { - self.active_language = Some(Some(language.name())); - } + if let Some((_, buffer, _)) = editor.active_excerpt(cx) + && let Some(language) = buffer.read(cx).language() + { + self.active_language = Some(Some(language.name())); } cx.notify(); diff --git a/crates/language_tools/src/key_context_view.rs b/crates/language_tools/src/key_context_view.rs index 88131781ec3af336d3ae793cf1820e5bcf731605..057259d114f88f785c1a016d82f443b2ee2be644 100644 --- a/crates/language_tools/src/key_context_view.rs +++ b/crates/language_tools/src/key_context_view.rs @@ -71,12 +71,10 @@ impl KeyContextView { } else { None } + } else if this.action_matches(&e.action, binding.action()) { + Some(true) } else { - if this.action_matches(&e.action, binding.action()) { - Some(true) - } else { - Some(false) - } + Some(false) }; let predicate = if let Some(predicate) = binding.predicate() { format!("{}", predicate) @@ -98,9 +96,7 @@ impl KeyContextView { cx.notify(); }); let sub2 = cx.observe_pending_input(window, |this, window, cx| { - this.pending_keystrokes = window - .pending_input_keystrokes() - .map(|k| k.iter().cloned().collect()); + this.pending_keystrokes = window.pending_input_keystrokes().map(|k| k.to_vec()); if this.pending_keystrokes.is_some() { 
this.last_keystrokes.take(); } diff --git a/crates/language_tools/src/lsp_log.rs b/crates/language_tools/src/lsp_log.rs index 606f3a3f0e5f91b5fb8856cabce240d094f3cf49..43c0365291c349a8e456c9172e46d121a6f23fe7 100644 --- a/crates/language_tools/src/lsp_log.rs +++ b/crates/language_tools/src/lsp_log.rs @@ -254,35 +254,35 @@ impl LogStore { let copilot_subscription = Copilot::global(cx).map(|copilot| { let copilot = &copilot; cx.subscribe(copilot, |this, copilot, edit_prediction_event, cx| { - if let copilot::Event::CopilotLanguageServerStarted = edit_prediction_event { - if let Some(server) = copilot.read(cx).language_server() { - let server_id = server.server_id(); - let weak_this = cx.weak_entity(); - this.copilot_log_subscription = - Some(server.on_notification::( - move |params, cx| { - weak_this - .update(cx, |this, cx| { - this.add_language_server_log( - server_id, - MessageType::LOG, - ¶ms.message, - cx, - ); - }) - .ok(); - }, - )); - let name = LanguageServerName::new_static("copilot"); - this.add_language_server( - LanguageServerKind::Global, - server.server_id(), - Some(name), - None, - Some(server.clone()), - cx, - ); - } + if let copilot::Event::CopilotLanguageServerStarted = edit_prediction_event + && let Some(server) = copilot.read(cx).language_server() + { + let server_id = server.server_id(); + let weak_this = cx.weak_entity(); + this.copilot_log_subscription = + Some(server.on_notification::( + move |params, cx| { + weak_this + .update(cx, |this, cx| { + this.add_language_server_log( + server_id, + MessageType::LOG, + ¶ms.message, + cx, + ); + }) + .ok(); + }, + )); + let name = LanguageServerName::new_static("copilot"); + this.add_language_server( + LanguageServerKind::Global, + server.server_id(), + Some(name), + None, + Some(server.clone()), + cx, + ); } }) }); @@ -406,10 +406,7 @@ impl LogStore { server_state.worktree_id = Some(worktree_id); } - if let Some(server) = server - .clone() - .filter(|_| server_state.io_logs_subscription.is_none()) - { + if let Some(server) = server.filter(|_| server_state.io_logs_subscription.is_none()) { let io_tx = self.io_tx.clone(); let server_id = server.server_id(); server_state.io_logs_subscription = Some(server.on_io(move |io_kind, message| { @@ -661,7 +658,7 @@ impl LogStore { IoKind::StdOut => true, IoKind::StdIn => false, IoKind::StdErr => { - self.add_language_server_log(language_server_id, MessageType::LOG, &message, cx); + self.add_language_server_log(language_server_id, MessageType::LOG, message, cx); return Some(()); } }; @@ -733,16 +730,14 @@ impl LspLogView { let first_server_id_for_project = store.read(cx).server_ids_for_project(&weak_project).next(); if let Some(current_lsp) = this.current_server_id { - if !store.read(cx).language_servers.contains_key(¤t_lsp) { - if let Some(server_id) = first_server_id_for_project { - match this.active_entry_kind { - LogKind::Rpc => { - this.show_rpc_trace_for_server(server_id, window, cx) - } - LogKind::Trace => this.show_trace_for_server(server_id, window, cx), - LogKind::Logs => this.show_logs_for_server(server_id, window, cx), - LogKind::ServerInfo => this.show_server_info(server_id, window, cx), - } + if !store.read(cx).language_servers.contains_key(¤t_lsp) + && let Some(server_id) = first_server_id_for_project + { + match this.active_entry_kind { + LogKind::Rpc => this.show_rpc_trace_for_server(server_id, window, cx), + LogKind::Trace => this.show_trace_for_server(server_id, window, cx), + LogKind::Logs => this.show_logs_for_server(server_id, window, cx), + LogKind::ServerInfo => 
this.show_server_info(server_id, window, cx), } } } else if let Some(server_id) = first_server_id_for_project { @@ -776,21 +771,17 @@ impl LspLogView { ], cx, ); - if text.len() > 1024 { - if let Some((fold_offset, _)) = + if text.len() > 1024 + && let Some((fold_offset, _)) = text.char_indices().dropping(1024).next() - { - if fold_offset < text.len() { - editor.fold_ranges( - vec![ - last_offset + fold_offset..last_offset + text.len(), - ], - false, - window, - cx, - ); - } - } + && fold_offset < text.len() + { + editor.fold_ranges( + vec![last_offset + fold_offset..last_offset + text.len()], + false, + window, + cx, + ); } if newest_cursor_is_at_end { @@ -936,7 +927,7 @@ impl LspLogView { let state = log_store.language_servers.get(&server_id)?; Some(LogMenuItem { server_id, - server_name: name.clone(), + server_name: name, server_kind: state.kind.clone(), worktree_root_name: "supplementary".to_string(), rpc_trace_enabled: state.rpc_state.is_some(), @@ -1311,14 +1302,14 @@ impl ToolbarItemView for LspLogToolbarItemView { _: &mut Window, cx: &mut Context, ) -> workspace::ToolbarItemLocation { - if let Some(item) = active_pane_item { - if let Some(log_view) = item.downcast::() { - self.log_view = Some(log_view.clone()); - self._log_view_subscription = Some(cx.observe(&log_view, |_, _, cx| { - cx.notify(); - })); - return ToolbarItemLocation::PrimaryLeft; - } + if let Some(item) = active_pane_item + && let Some(log_view) = item.downcast::() + { + self.log_view = Some(log_view.clone()); + self._log_view_subscription = Some(cx.observe(&log_view, |_, _, cx| { + cx.notify(); + })); + return ToolbarItemLocation::PrimaryLeft; } self.log_view = None; self._log_view_subscription = None; @@ -1358,7 +1349,7 @@ impl Render for LspLogToolbarItemView { }) .collect(); - let log_toolbar_view = cx.entity().clone(); + let log_toolbar_view = cx.entity(); let lsp_menu = PopoverMenu::new("LspLogView") .anchor(Corner::TopLeft) @@ -1533,7 +1524,7 @@ impl Render for LspLogToolbarItemView { .icon_color(Color::Muted), ) .menu({ - let log_view = log_view.clone(); + let log_view = log_view; move |window, cx| { let id = log_view.read(cx).current_server_id?; @@ -1601,7 +1592,7 @@ impl Render for LspLogToolbarItemView { .icon_color(Color::Muted), ) .menu({ - let log_view = log_view.clone(); + let log_view = log_view; move |window, cx| { let id = log_view.read(cx).current_server_id?; diff --git a/crates/language_tools/src/lsp_tool.rs b/crates/language_tools/src/lsp_tool.rs index 50547253a92b8c23d0530326faf916e56363dcd9..dd3e80212fda08f43718a664d2cfd6d377182273 100644 --- a/crates/language_tools/src/lsp_tool.rs +++ b/crates/language_tools/src/lsp_tool.rs @@ -349,7 +349,6 @@ impl LanguageServerState { .detach(); } else { cx.propagate(); - return; } } }, @@ -523,7 +522,6 @@ impl LspTool { if ProjectSettings::get_global(cx).global_lsp_settings.button { if lsp_tool.lsp_menu.is_none() { lsp_tool.refresh_lsp_menu(true, window, cx); - return; } } else if lsp_tool.lsp_menu.take().is_some() { cx.notify(); @@ -1007,7 +1005,7 @@ impl Render for LspTool { (None, "All Servers Operational") }; - let lsp_tool = cx.entity().clone(); + let lsp_tool = cx.entity(); div().child( PopoverMenu::new("lsp-tool") diff --git a/crates/language_tools/src/syntax_tree_view.rs b/crates/language_tools/src/syntax_tree_view.rs index eadba2c1d2f4c96c4f0ad2646c2e9957bbae3bdc..cf84ac34c4af6d04895ba5d1e22c262a1ef8f03c 100644 --- a/crates/language_tools/src/syntax_tree_view.rs +++ b/crates/language_tools/src/syntax_tree_view.rs @@ -103,12 +103,11 @@ impl 
SyntaxTreeView { window: &mut Window, cx: &mut Context, ) { - if let Some(item) = active_item { - if item.item_id() != cx.entity_id() { - if let Some(editor) = item.act_as::(cx) { - self.set_editor(editor, window, cx); - } - } + if let Some(item) = active_item + && item.item_id() != cx.entity_id() + && let Some(editor) = item.act_as::(cx) + { + self.set_editor(editor, window, cx); } } @@ -157,7 +156,7 @@ impl SyntaxTreeView { .buffer_snapshot .range_to_buffer_ranges(selection_range) .pop()?; - let buffer = multi_buffer.buffer(buffer.remote_id()).unwrap().clone(); + let buffer = multi_buffer.buffer(buffer.remote_id()).unwrap(); Some((buffer, range, excerpt_id)) })?; @@ -456,7 +455,7 @@ impl SyntaxTreeToolbarItemView { let active_layer = buffer_state.active_layer.clone()?; let active_buffer = buffer_state.buffer.read(cx).snapshot(); - let view = cx.entity().clone(); + let view = cx.entity(); Some( PopoverMenu::new("Syntax Tree") .trigger(Self::render_header(&active_layer)) @@ -537,12 +536,12 @@ impl ToolbarItemView for SyntaxTreeToolbarItemView { window: &mut Window, cx: &mut Context, ) -> ToolbarItemLocation { - if let Some(item) = active_pane_item { - if let Some(view) = item.downcast::() { - self.tree_view = Some(view.clone()); - self.subscription = Some(cx.observe_in(&view, window, |_, _, _, cx| cx.notify())); - return ToolbarItemLocation::PrimaryLeft; - } + if let Some(item) = active_pane_item + && let Some(view) = item.downcast::() + { + self.tree_view = Some(view.clone()); + self.subscription = Some(cx.observe_in(&view, window, |_, _, _, cx| cx.notify())); + return ToolbarItemLocation::PrimaryLeft; } self.tree_view = None; self.subscription = None; diff --git a/crates/languages/src/bash/overrides.scm b/crates/languages/src/bash/overrides.scm new file mode 100644 index 0000000000000000000000000000000000000000..81fec9a5f57b28fc67b4781ec37df43559e21dc9 --- /dev/null +++ b/crates/languages/src/bash/overrides.scm @@ -0,0 +1,2 @@ +(comment) @comment.inclusive +(string) @string diff --git a/crates/languages/src/c.rs b/crates/languages/src/c.rs index a55d8ff998a8d92625f5e4a319e01dd3a5735ec4..2820f55a497c8b77b679a3ab5368cae6901c89e6 100644 --- a/crates/languages/src/c.rs +++ b/crates/languages/src/c.rs @@ -22,13 +22,13 @@ impl CLspAdapter { #[async_trait(?Send)] impl super::LspAdapter for CLspAdapter { fn name(&self) -> LanguageServerName { - Self::SERVER_NAME.clone() + Self::SERVER_NAME } async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, - _: Arc, + _: Option, _: &AsyncApp, ) -> Option { let path = delegate.which(Self::SERVER_NAME.as_ref()).await?; @@ -71,8 +71,11 @@ impl super::LspAdapter for CLspAdapter { container_dir: PathBuf, delegate: &dyn LspAdapterDelegate, ) -> Result { - let GitHubLspBinaryVersion { name, url, digest } = - &*version.downcast::().unwrap(); + let GitHubLspBinaryVersion { + name, + url, + digest: expected_digest, + } = *version.downcast::().unwrap(); let version_dir = container_dir.join(format!("clangd_{name}")); let binary_path = version_dir.join("bin/clangd"); @@ -99,7 +102,9 @@ impl super::LspAdapter for CLspAdapter { log::warn!("Unable to run {binary_path:?} asset, redownloading: {err}",) }) }; - if let (Some(actual_digest), Some(expected_digest)) = (&metadata.digest, digest) { + if let (Some(actual_digest), Some(expected_digest)) = + (&metadata.digest, &expected_digest) + { if actual_digest == expected_digest { if validity_check().await.is_ok() { return Ok(binary); @@ -115,8 +120,8 @@ impl super::LspAdapter for CLspAdapter { } 
download_server_binary( delegate, - url, - digest.as_deref(), + &url, + expected_digest.as_deref(), &container_dir, AssetKind::Zip, ) @@ -125,7 +130,7 @@ impl super::LspAdapter for CLspAdapter { GithubBinaryMetadata::write_to_file( &GithubBinaryMetadata { metadata_version: 1, - digest: digest.clone(), + digest: expected_digest, }, &metadata_path, ) @@ -248,8 +253,7 @@ impl super::LspAdapter for CLspAdapter { .grammar() .and_then(|g| g.highlight_id_for_name(highlight_name?)) { - let mut label = - CodeLabel::plain(label.to_string(), completion.filter_text.as_deref()); + let mut label = CodeLabel::plain(label, completion.filter_text.as_deref()); label.runs.push(( 0..label.text.rfind('(').unwrap_or(label.text.len()), highlight_id, @@ -259,10 +263,7 @@ impl super::LspAdapter for CLspAdapter { } _ => {} } - Some(CodeLabel::plain( - label.to_string(), - completion.filter_text.as_deref(), - )) + Some(CodeLabel::plain(label, completion.filter_text.as_deref())) } async fn label_for_symbol( diff --git a/crates/languages/src/cpp/outline.scm b/crates/languages/src/cpp/outline.scm index 448fe35220b32a0ed1d0bd6517d02b4964d09d40..c8973665581472d59bd478f8a95d4881f7b431e8 100644 --- a/crates/languages/src/cpp/outline.scm +++ b/crates/languages/src/cpp/outline.scm @@ -149,7 +149,9 @@ parameters: (parameter_list "(" @context ")" @context))) - ] - (type_qualifier)? @context) @item + ; Fields declarations may define multiple fields, and so @item is on the + ; declarator so they each get distinct ranges. + ] @item + (type_qualifier)? @context) (comment) @annotation diff --git a/crates/languages/src/css.rs b/crates/languages/src/css.rs index 7725e079be31cacc3e3bc5e30ec48b6ab8d2d4d4..2480d4026883ab5b6e3af7f8a4d8bbbb59757879 100644 --- a/crates/languages/src/css.rs +++ b/crates/languages/src/css.rs @@ -2,9 +2,9 @@ use anyhow::{Context as _, Result}; use async_trait::async_trait; use futures::StreamExt; use gpui::AsyncApp; -use language::{LanguageToolchainStore, LspAdapter, LspAdapterDelegate}; +use language::{LspAdapter, LspAdapterDelegate, Toolchain}; use lsp::{LanguageServerBinary, LanguageServerName}; -use node_runtime::NodeRuntime; +use node_runtime::{NodeRuntime, VersionStrategy}; use project::{Fs, lsp_store::language_server_settings}; use serde_json::json; use smol::fs; @@ -43,7 +43,7 @@ impl LspAdapter for CssLspAdapter { async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, - _: Arc, + _: Option, _: &AsyncApp, ) -> Option { let path = delegate @@ -103,7 +103,12 @@ impl LspAdapter for CssLspAdapter { let should_install_language_server = self .node - .should_install_npm_package(Self::PACKAGE_NAME, &server_path, &container_dir, &version) + .should_install_npm_package( + Self::PACKAGE_NAME, + &server_path, + container_dir, + VersionStrategy::Latest(version), + ) .await; if should_install_language_server { @@ -139,7 +144,7 @@ impl LspAdapter for CssLspAdapter { self: Arc, _: &dyn Fs, delegate: &Arc, - _: Arc, + _: Option, cx: &mut AsyncApp, ) -> Result { let mut default_config = json!({ diff --git a/crates/languages/src/github_download.rs b/crates/languages/src/github_download.rs index a3cd0a964b31c17e73a72317efc0616474dc846d..766c894fbb2b660778f09933b4facd2114ebb5bf 100644 --- a/crates/languages/src/github_download.rs +++ b/crates/languages/src/github_download.rs @@ -18,9 +18,8 @@ impl GithubBinaryMetadata { let metadata_content = async_fs::read_to_string(metadata_path) .await .with_context(|| format!("reading metadata file at {metadata_path:?}"))?; - let metadata: GithubBinaryMetadata = 
serde_json::from_str(&metadata_content) - .with_context(|| format!("parsing metadata file at {metadata_path:?}"))?; - Ok(metadata) + serde_json::from_str(&metadata_content) + .with_context(|| format!("parsing metadata file at {metadata_path:?}")) } pub(crate) async fn write_to_file(&self, metadata_path: &Path) -> Result<()> { @@ -62,6 +61,7 @@ pub(crate) async fn download_server_binary( format!("saving archive contents into the temporary file for {url}",) })?; let asset_sha_256 = format!("{:x}", writer.hasher.finalize()); + anyhow::ensure!( asset_sha_256 == expected_sha_256, "{url} asset got SHA-256 mismatch. Expected: {expected_sha_256}, Got: {asset_sha_256}", @@ -96,7 +96,7 @@ async fn stream_response_archive( AssetKind::TarGz => extract_tar_gz(destination_path, url, response).await?, AssetKind::Gz => extract_gz(destination_path, url, response).await?, AssetKind::Zip => { - util::archive::extract_zip(&destination_path, response).await?; + util::archive::extract_zip(destination_path, response).await?; } }; Ok(()) @@ -113,11 +113,11 @@ async fn stream_file_archive( AssetKind::Gz => extract_gz(destination_path, url, file_archive).await?, #[cfg(not(windows))] AssetKind::Zip => { - util::archive::extract_seekable_zip(&destination_path, file_archive).await?; + util::archive::extract_seekable_zip(destination_path, file_archive).await?; } #[cfg(windows)] AssetKind::Zip => { - util::archive::extract_zip(&destination_path, file_archive).await?; + util::archive::extract_zip(destination_path, file_archive).await?; } }; Ok(()) diff --git a/crates/languages/src/go.rs b/crates/languages/src/go.rs index 16c1b67203e673ddb8c20c110d46ea7bf062ea43..24e2ca2f56fff5ba1a3d92ca5e0bf16ac1a9463b 100644 --- a/crates/languages/src/go.rs +++ b/crates/languages/src/go.rs @@ -53,7 +53,7 @@ const BINARY: &str = if cfg!(target_os = "windows") { #[async_trait(?Send)] impl super::LspAdapter for GoLspAdapter { fn name(&self) -> LanguageServerName { - Self::SERVER_NAME.clone() + Self::SERVER_NAME } async fn fetch_latest_server_version( @@ -75,7 +75,7 @@ impl super::LspAdapter for GoLspAdapter { async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, - _: Arc, + _: Option, _: &AsyncApp, ) -> Option { let path = delegate.which(Self::SERVER_NAME.as_ref()).await?; @@ -131,19 +131,19 @@ impl super::LspAdapter for GoLspAdapter { if let Some(version) = *version { let binary_path = container_dir.join(format!("gopls_{version}_go_{go_version}")); - if let Ok(metadata) = fs::metadata(&binary_path).await { - if metadata.is_file() { - remove_matching(&container_dir, |entry| { - entry != binary_path && entry.file_name() != Some(OsStr::new("gobin")) - }) - .await; - - return Ok(LanguageServerBinary { - path: binary_path.to_path_buf(), - arguments: server_binary_arguments(), - env: None, - }); - } + if let Ok(metadata) = fs::metadata(&binary_path).await + && metadata.is_file() + { + remove_matching(&container_dir, |entry| { + entry != binary_path && entry.file_name() != Some(OsStr::new("gobin")) + }) + .await; + + return Ok(LanguageServerBinary { + path: binary_path.to_path_buf(), + arguments: server_binary_arguments(), + env: None, + }); } } else if let Some(path) = this .cached_server_binary(container_dir.clone(), delegate) @@ -452,7 +452,7 @@ async fn get_cached_server_binary(container_dir: PathBuf) -> Option = runnables + .iter() + .flat_map(|r| &r.runnable.tags) + .map(|tag| tag.0.to_string()) + .collect(); + + assert!( + tag_strings.contains(&"go-test".to_string()), + "Should find go-test tag, found: {:?}", + 
tag_strings + ); assert!( - runnables.len() == 2, - "Should find test function and subtest with double quotes, found: {}", - runnables.len() + tag_strings.contains(&"go-subtest".to_string()), + "Should find go-subtest tag, found: {:?}", + tag_strings ); let buffer = cx.new(|cx| { @@ -860,10 +904,299 @@ mod tests { .collect() }); + let tag_strings: Vec = runnables + .iter() + .flat_map(|r| &r.runnable.tags) + .map(|tag| tag.0.to_string()) + .collect(); + + assert!( + tag_strings.contains(&"go-test".to_string()), + "Should find go-test tag, found: {:?}", + tag_strings + ); + assert!( + tag_strings.contains(&"go-subtest".to_string()), + "Should find go-subtest tag, found: {:?}", + tag_strings + ); + } + + #[gpui::test] + fn test_go_table_test_slice_detection(cx: &mut TestAppContext) { + let language = language("go", tree_sitter_go::LANGUAGE.into()); + + let table_test = r#" + package main + + import "testing" + + func TestExample(t *testing.T) { + _ = "some random string" + + testCases := []struct{ + name string + anotherStr string + }{ + { + name: "test case 1", + anotherStr: "foo", + }, + { + name: "test case 2", + anotherStr: "bar", + }, + } + + notATableTest := []struct{ + name string + }{ + { + name: "some string", + }, + { + name: "some other string", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + // test code here + }) + } + } + "#; + + let buffer = + cx.new(|cx| crate::Buffer::local(table_test, cx).with_language(language.clone(), cx)); + cx.executor().run_until_parked(); + + let runnables: Vec<_> = buffer.update(cx, |buffer, _| { + let snapshot = buffer.snapshot(); + snapshot.runnable_ranges(0..table_test.len()).collect() + }); + + let tag_strings: Vec = runnables + .iter() + .flat_map(|r| &r.runnable.tags) + .map(|tag| tag.0.to_string()) + .collect(); + + assert!( + tag_strings.contains(&"go-test".to_string()), + "Should find go-test tag, found: {:?}", + tag_strings + ); + assert!( + tag_strings.contains(&"go-table-test-case".to_string()), + "Should find go-table-test-case tag, found: {:?}", + tag_strings + ); + + let go_test_count = tag_strings.iter().filter(|&tag| tag == "go-test").count(); + let go_table_test_count = tag_strings + .iter() + .filter(|&tag| tag == "go-table-test-case") + .count(); + + assert!( + go_test_count == 1, + "Should find exactly 1 go-test, found: {}", + go_test_count + ); + assert!( + go_table_test_count == 2, + "Should find exactly 2 go-table-test-case, found: {}", + go_table_test_count + ); + } + + #[gpui::test] + fn test_go_table_test_slice_ignored(cx: &mut TestAppContext) { + let language = language("go", tree_sitter_go::LANGUAGE.into()); + + let table_test = r#" + package main + + func Example() { + _ = "some random string" + + notATableTest := []struct{ + name string + }{ + { + name: "some string", + }, + { + name: "some other string", + }, + } + } + "#; + + let buffer = + cx.new(|cx| crate::Buffer::local(table_test, cx).with_language(language.clone(), cx)); + cx.executor().run_until_parked(); + + let runnables: Vec<_> = buffer.update(cx, |buffer, _| { + let snapshot = buffer.snapshot(); + snapshot.runnable_ranges(0..table_test.len()).collect() + }); + + let tag_strings: Vec = runnables + .iter() + .flat_map(|r| &r.runnable.tags) + .map(|tag| tag.0.to_string()) + .collect(); + + assert!( + !tag_strings.contains(&"go-test".to_string()), + "Should find go-test tag, found: {:?}", + tag_strings + ); + assert!( + !tag_strings.contains(&"go-table-test-case".to_string()), + "Should find go-table-test-case tag, found: 
{:?}", + tag_strings + ); + } + + #[gpui::test] + fn test_go_table_test_map_detection(cx: &mut TestAppContext) { + let language = language("go", tree_sitter_go::LANGUAGE.into()); + + let table_test = r#" + package main + + import "testing" + + func TestExample(t *testing.T) { + _ = "some random string" + + testCases := map[string]struct { + someStr string + fail bool + }{ + "test failure": { + someStr: "foo", + fail: true, + }, + "test success": { + someStr: "bar", + fail: false, + }, + } + + notATableTest := map[string]struct { + someStr string + }{ + "some string": { + someStr: "foo", + }, + "some other string": { + someStr: "bar", + }, + } + + for name, tc := range testCases { + t.Run(name, func(t *testing.T) { + // test code here + }) + } + } + "#; + + let buffer = + cx.new(|cx| crate::Buffer::local(table_test, cx).with_language(language.clone(), cx)); + cx.executor().run_until_parked(); + + let runnables: Vec<_> = buffer.update(cx, |buffer, _| { + let snapshot = buffer.snapshot(); + snapshot.runnable_ranges(0..table_test.len()).collect() + }); + + let tag_strings: Vec = runnables + .iter() + .flat_map(|r| &r.runnable.tags) + .map(|tag| tag.0.to_string()) + .collect(); + + assert!( + tag_strings.contains(&"go-test".to_string()), + "Should find go-test tag, found: {:?}", + tag_strings + ); + assert!( + tag_strings.contains(&"go-table-test-case".to_string()), + "Should find go-table-test-case tag, found: {:?}", + tag_strings + ); + + let go_test_count = tag_strings.iter().filter(|&tag| tag == "go-test").count(); + let go_table_test_count = tag_strings + .iter() + .filter(|&tag| tag == "go-table-test-case") + .count(); + + assert!( + go_test_count == 1, + "Should find exactly 1 go-test, found: {}", + go_test_count + ); + assert!( + go_table_test_count == 2, + "Should find exactly 2 go-table-test-case, found: {}", + go_table_test_count + ); + } + + #[gpui::test] + fn test_go_table_test_map_ignored(cx: &mut TestAppContext) { + let language = language("go", tree_sitter_go::LANGUAGE.into()); + + let table_test = r#" + package main + + func Example() { + _ = "some random string" + + notATableTest := map[string]struct { + someStr string + }{ + "some string": { + someStr: "foo", + }, + "some other string": { + someStr: "bar", + }, + } + } + "#; + + let buffer = + cx.new(|cx| crate::Buffer::local(table_test, cx).with_language(language.clone(), cx)); + cx.executor().run_until_parked(); + + let runnables: Vec<_> = buffer.update(cx, |buffer, _| { + let snapshot = buffer.snapshot(); + snapshot.runnable_ranges(0..table_test.len()).collect() + }); + + let tag_strings: Vec = runnables + .iter() + .flat_map(|r| &r.runnable.tags) + .map(|tag| tag.0.to_string()) + .collect(); + + assert!( + !tag_strings.contains(&"go-test".to_string()), + "Should find go-test tag, found: {:?}", + tag_strings + ); assert!( - runnables.len() == 2, - "Should find test function and subtest with backticks, found: {}", - runnables.len() + !tag_strings.contains(&"go-table-test-case".to_string()), + "Should find go-table-test-case tag, found: {:?}", + tag_strings ); } diff --git a/crates/languages/src/go/outline.scm b/crates/languages/src/go/outline.scm index e37ae7e5723027af3227f947898b301c0305a4f5..c745f55aff7dcd4b3bfd802884db7a985c1387fa 100644 --- a/crates/languages/src/go/outline.scm +++ b/crates/languages/src/go/outline.scm @@ -1,4 +1,5 @@ (comment) @annotation + (type_declaration "type" @context [ @@ -42,13 +43,13 @@ (var_declaration "var" @context [ + ; The declaration may define multiple variables, and so @item is on + ; 
the identifier so they get distinct ranges. (var_spec - name: (identifier) @name) @item + name: (identifier) @name @item) (var_spec_list - "(" (var_spec - name: (identifier) @name) @item - ")" + name: (identifier) @name @item) ) ] ) @@ -60,5 +61,7 @@ "(" @context ")" @context)) @item +; Fields declarations may define multiple fields, and so @item is on the +; declarator so they each get distinct ranges. (field_declaration - name: (_) @name) @item + name: (_) @name @item) diff --git a/crates/languages/src/go/runnables.scm b/crates/languages/src/go/runnables.scm index 6418cd04d8d69c2bb97434053c93f591aed55c68..f56262f799c3d73c8eaadd03665565f75add27ba 100644 --- a/crates/languages/src/go/runnables.scm +++ b/crates/languages/src/go/runnables.scm @@ -91,3 +91,103 @@ ) @_ (#set! tag go-main) ) + +; Table test cases - slice and map +( + (short_var_declaration + left: (expression_list (identifier) @_collection_var) + right: (expression_list + (composite_literal + type: [ + (slice_type) + (map_type + key: (type_identifier) @_key_type + (#eq? @_key_type "string") + ) + ] + body: (literal_value + [ + (literal_element + (literal_value + (keyed_element + (literal_element + (identifier) @_field_name + ) + (literal_element + [ + (interpreted_string_literal) @run @_table_test_case_name + (raw_string_literal) @run @_table_test_case_name + ] + ) + ) + ) + ) + (keyed_element + (literal_element + [ + (interpreted_string_literal) @run @_table_test_case_name + (raw_string_literal) @run @_table_test_case_name + ] + ) + ) + ] + ) + ) + ) + ) + (for_statement + (range_clause + left: (expression_list + [ + ( + (identifier) + (identifier) @_loop_var + ) + (identifier) @_loop_var + ] + ) + right: (identifier) @_range_var + (#eq? @_range_var @_collection_var) + ) + body: (block + (expression_statement + (call_expression + function: (selector_expression + operand: (identifier) @_t_var + field: (field_identifier) @_run_method + (#eq? @_run_method "Run") + ) + arguments: (argument_list + . + [ + (selector_expression + operand: (identifier) @_tc_var + (#eq? @_tc_var @_loop_var) + field: (field_identifier) @_field_check + (#eq? @_field_check @_field_name) + ) + (identifier) @_arg_var + (#eq? @_arg_var @_loop_var) + ] + . + (func_literal + parameters: (parameter_list + (parameter_declaration + type: (pointer_type + (qualified_type + package: (package_identifier) @_pkg + name: (type_identifier) @_type + (#eq? @_pkg "testing") + (#eq? @_type "T") + ) + ) + ) + ) + ) + ) + ) + ) + ) + ) @_ + (#set! tag go-table-test-case) +) diff --git a/crates/languages/src/javascript/outline.scm b/crates/languages/src/javascript/outline.scm index 026c71e1f91d323ff2370828f330e4a4944e74db..ca16c27a27be3e1e09ced16cd2eef7aa28345f9e 100644 --- a/crates/languages/src/javascript/outline.scm +++ b/crates/languages/src/javascript/outline.scm @@ -31,12 +31,16 @@ (export_statement (lexical_declaration ["let" "const"] @context + ; Multiple names may be exported - @item is on the declarator to keep + ; ranges distinct. (variable_declarator name: (_) @name) @item))) (program (lexical_declaration ["let" "const"] @context + ; Multiple names may be defined - @item is on the declarator to keep + ; ranges distinct. 
(variable_declarator name: (_) @name) @item)) diff --git a/crates/languages/src/json.rs b/crates/languages/src/json.rs index ca82bb2431f5408e948005ffc9fb705809a93087..4fcf865568b11ea0f8b40d6a7d57cc354e85a680 100644 --- a/crates/languages/src/json.rs +++ b/crates/languages/src/json.rs @@ -8,11 +8,11 @@ use futures::StreamExt; use gpui::{App, AsyncApp, Task}; use http_client::github::{GitHubLspBinaryVersion, latest_github_release}; use language::{ - ContextProvider, LanguageName, LanguageRegistry, LanguageToolchainStore, LocalFile as _, - LspAdapter, LspAdapterDelegate, + ContextProvider, LanguageName, LanguageRegistry, LocalFile as _, LspAdapter, + LspAdapterDelegate, Toolchain, }; use lsp::{LanguageServerBinary, LanguageServerName}; -use node_runtime::NodeRuntime; +use node_runtime::{NodeRuntime, VersionStrategy}; use project::{Fs, lsp_store::language_server_settings}; use serde_json::{Value, json}; use settings::{KeymapFile, SettingsJsonSchemaParams, SettingsStore}; @@ -234,7 +234,7 @@ impl JsonLspAdapter { schemas .as_array_mut() .unwrap() - .extend(cx.all_action_names().into_iter().map(|&name| { + .extend(cx.all_action_names().iter().map(|&name| { project::lsp_store::json_language_server_ext::url_schema_for_action(name) })); @@ -280,7 +280,7 @@ impl JsonLspAdapter { ) })?; writer.replace(config.clone()); - return Ok(config); + Ok(config) } } @@ -303,7 +303,7 @@ impl LspAdapter for JsonLspAdapter { async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, - _: Arc, + _: Option, _: &AsyncApp, ) -> Option { let path = delegate @@ -340,7 +340,12 @@ impl LspAdapter for JsonLspAdapter { let should_install_language_server = self .node - .should_install_npm_package(Self::PACKAGE_NAME, &server_path, &container_dir, &version) + .should_install_npm_package( + Self::PACKAGE_NAME, + &server_path, + container_dir, + VersionStrategy::Latest(version), + ) .await; if should_install_language_server { @@ -399,7 +404,7 @@ impl LspAdapter for JsonLspAdapter { self: Arc, _: &dyn Fs, delegate: &Arc, - _: Arc, + _: Option, cx: &mut AsyncApp, ) -> Result { let mut config = self.get_or_init_workspace_config(cx).await?; @@ -483,7 +488,7 @@ impl NodeVersionAdapter { #[async_trait(?Send)] impl LspAdapter for NodeVersionAdapter { fn name(&self) -> LanguageServerName { - Self::SERVER_NAME.clone() + Self::SERVER_NAME } async fn fetch_latest_server_version( @@ -524,7 +529,7 @@ impl LspAdapter for NodeVersionAdapter { async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, - _: Arc, + _: Option, _: &AsyncApp, ) -> Option { let path = delegate.which(Self::SERVER_NAME.as_ref()).await?; diff --git a/crates/languages/src/jsonc/overrides.scm b/crates/languages/src/jsonc/overrides.scm index cc966ad4c13e0cc7f7fc27a1152b461f24e3c6b0..81fec9a5f57b28fc67b4781ec37df43559e21dc9 100644 --- a/crates/languages/src/jsonc/overrides.scm +++ b/crates/languages/src/jsonc/overrides.scm @@ -1 +1,2 @@ +(comment) @comment.inclusive (string) @string diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index 195ba79e1d0e96acea7ac1a53590c1a947334069..d391e67d33fe90ac7e678c1458b8985e221fafc3 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -1,6 +1,6 @@ use anyhow::Context as _; use feature_flags::{FeatureFlag, FeatureFlagAppExt as _}; -use gpui::{App, UpdateGlobal}; +use gpui::{App, SharedString, UpdateGlobal}; use node_runtime::NodeRuntime; use python::PyprojectTomlManifestProvider; use rust::CargoManifestProvider; @@ -104,7 +104,7 @@ pub fn init(languages: Arc, 
node: NodeRuntime, cx: &mut App) { let typescript_context = Arc::new(typescript::TypeScriptContextProvider::new()); let typescript_lsp_adapter = Arc::new(typescript::TypeScriptLspAdapter::new(node.clone())); let vtsls_adapter = Arc::new(vtsls::VtslsLspAdapter::new(node.clone())); - let yaml_lsp_adapter = Arc::new(yaml::YamlLspAdapter::new(node.clone())); + let yaml_lsp_adapter = Arc::new(yaml::YamlLspAdapter::new(node)); let built_in_languages = [ LanguageInfo { @@ -119,12 +119,12 @@ pub fn init(languages: Arc, node: NodeRuntime, cx: &mut App) { }, LanguageInfo { name: "cpp", - adapters: vec![c_lsp_adapter.clone()], + adapters: vec![c_lsp_adapter], ..Default::default() }, LanguageInfo { name: "css", - adapters: vec![css_lsp_adapter.clone()], + adapters: vec![css_lsp_adapter], ..Default::default() }, LanguageInfo { @@ -146,20 +146,20 @@ pub fn init(languages: Arc, node: NodeRuntime, cx: &mut App) { }, LanguageInfo { name: "gowork", - adapters: vec![go_lsp_adapter.clone()], - context: Some(go_context_provider.clone()), + adapters: vec![go_lsp_adapter], + context: Some(go_context_provider), ..Default::default() }, LanguageInfo { name: "json", - adapters: vec![json_lsp_adapter.clone(), node_version_lsp_adapter.clone()], + adapters: vec![json_lsp_adapter.clone(), node_version_lsp_adapter], context: Some(json_context_provider.clone()), ..Default::default() }, LanguageInfo { name: "jsonc", - adapters: vec![json_lsp_adapter.clone()], - context: Some(json_context_provider.clone()), + adapters: vec![json_lsp_adapter], + context: Some(json_context_provider), ..Default::default() }, LanguageInfo { @@ -174,14 +174,16 @@ pub fn init(languages: Arc, node: NodeRuntime, cx: &mut App) { }, LanguageInfo { name: "python", - adapters: vec![python_lsp_adapter.clone(), py_lsp_adapter.clone()], + adapters: vec![python_lsp_adapter, py_lsp_adapter], context: Some(python_context_provider), toolchain: Some(python_toolchain_provider), + manifest_name: Some(SharedString::new_static("pyproject.toml").into()), }, LanguageInfo { name: "rust", adapters: vec![rust_lsp_adapter], context: Some(rust_context_provider), + manifest_name: Some(SharedString::new_static("Cargo.toml").into()), ..Default::default() }, LanguageInfo { @@ -199,7 +201,7 @@ pub fn init(languages: Arc, node: NodeRuntime, cx: &mut App) { LanguageInfo { name: "javascript", adapters: vec![typescript_lsp_adapter.clone(), vtsls_adapter.clone()], - context: Some(typescript_context.clone()), + context: Some(typescript_context), ..Default::default() }, LanguageInfo { @@ -234,6 +236,7 @@ pub fn init(languages: Arc, node: NodeRuntime, cx: &mut App) { registration.adapters, registration.context, registration.toolchain, + registration.manifest_name, ); } @@ -241,11 +244,8 @@ pub fn init(languages: Arc, node: NodeRuntime, cx: &mut App) { cx.observe_flag::({ let languages = languages.clone(); move |enabled, _| { - if enabled { - if let Some(adapter) = basedpyright_lsp_adapter.take() { - languages - .register_available_lsp_adapter(adapter.name(), move || adapter.clone()); - } + if enabled && let Some(adapter) = basedpyright_lsp_adapter.take() { + languages.register_available_lsp_adapter(adapter.name(), move || adapter.clone()); } } }) @@ -277,13 +277,13 @@ pub fn init(languages: Arc, node: NodeRuntime, cx: &mut App) { move || adapter.clone() }); languages.register_available_lsp_adapter(LanguageServerName("vtsls".into()), { - let adapter = vtsls_adapter.clone(); + let adapter = vtsls_adapter; move || adapter.clone() }); languages.register_available_lsp_adapter( 
LanguageServerName("typescript-language-server".into()), { - let adapter = typescript_lsp_adapter.clone(); + let adapter = typescript_lsp_adapter; move || adapter.clone() }, ); @@ -340,7 +340,7 @@ pub fn init(languages: Arc, node: NodeRuntime, cx: &mut App) { Arc::from(PyprojectTomlManifestProvider), ]; for provider in manifest_providers { - project::ManifestProviders::global(cx).register(provider); + project::ManifestProvidersStore::global(cx).register(provider); } } @@ -350,6 +350,7 @@ struct LanguageInfo { adapters: Vec>, context: Option>, toolchain: Option>, + manifest_name: Option, } fn register_language( @@ -358,6 +359,7 @@ fn register_language( adapters: Vec>, context: Option>, toolchain: Option>, + manifest_name: Option, ) { let config = load_config(name); for adapter in adapters { @@ -368,12 +370,14 @@ fn register_language( config.grammar.clone(), config.matcher.clone(), config.hidden, + manifest_name.clone(), Arc::new(move || { Ok(LoadedLanguage { config: config.clone(), queries: load_queries(name), context_provider: context.clone(), toolchain_provider: toolchain.clone(), + manifest_name: manifest_name.clone(), }) }), ); diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 0524c02fd5b95c4d8ccc2fbcbd2286a53a900fa2..d21b5dabd34c311d6e08140b2bc7ed363f79f273 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -4,16 +4,16 @@ use async_trait::async_trait; use collections::HashMap; use gpui::{App, Task}; use gpui::{AsyncApp, SharedString}; +use language::Toolchain; use language::ToolchainList; use language::ToolchainLister; use language::language_settings::language_settings; use language::{ContextLocation, LanguageToolchainStore}; use language::{ContextProvider, LspAdapter, LspAdapterDelegate}; use language::{LanguageName, ManifestName, ManifestProvider, ManifestQuery}; -use language::{Toolchain, WorkspaceFoldersContent}; use lsp::LanguageServerBinary; use lsp::LanguageServerName; -use node_runtime::NodeRuntime; +use node_runtime::{NodeRuntime, VersionStrategy}; use pet_core::Configuration; use pet_core::os_environment::Environment; use pet_core::python_environment::PythonEnvironmentKind; @@ -103,7 +103,7 @@ impl PythonLspAdapter { #[async_trait(?Send)] impl LspAdapter for PythonLspAdapter { fn name(&self) -> LanguageServerName { - Self::SERVER_NAME.clone() + Self::SERVER_NAME } async fn initialization_options( @@ -127,7 +127,7 @@ impl LspAdapter for PythonLspAdapter { async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, - _: Arc, + _: Option, _: &AsyncApp, ) -> Option { if let Some(pyright_bin) = delegate.which("pyright-langserver".as_ref()).await { @@ -204,8 +204,8 @@ impl LspAdapter for PythonLspAdapter { .should_install_npm_package( Self::SERVER_NAME.as_ref(), &server_path, - &container_dir, - &version, + container_dir, + VersionStrategy::Latest(version), ) .await; @@ -319,17 +319,9 @@ impl LspAdapter for PythonLspAdapter { self: Arc, _: &dyn Fs, adapter: &Arc, - toolchains: Arc, + toolchain: Option, cx: &mut AsyncApp, ) -> Result { - let toolchain = toolchains - .active_toolchain( - adapter.worktree_id(), - Arc::from("".as_ref()), - LanguageName::new("Python"), - cx, - ) - .await; cx.update(move |cx| { let mut user_settings = language_server_settings(adapter.as_ref(), &Self::SERVER_NAME, cx) @@ -346,31 +338,31 @@ impl LspAdapter for PythonLspAdapter { let interpreter_path = toolchain.path.to_string(); // Detect if this is a virtual environment - if let Some(interpreter_dir) = 
Path::new(&interpreter_path).parent() { - if let Some(venv_dir) = interpreter_dir.parent() { - // Check if this looks like a virtual environment - if venv_dir.join("pyvenv.cfg").exists() - || venv_dir.join("bin/activate").exists() - || venv_dir.join("Scripts/activate.bat").exists() - { - // Set venvPath and venv at the root level - // This matches the format of a pyrightconfig.json file - if let Some(parent) = venv_dir.parent() { - // Use relative path if the venv is inside the workspace - let venv_path = if parent == adapter.worktree_root_path() { - ".".to_string() - } else { - parent.to_string_lossy().into_owned() - }; - object.insert("venvPath".to_string(), Value::String(venv_path)); - } + if let Some(interpreter_dir) = Path::new(&interpreter_path).parent() + && let Some(venv_dir) = interpreter_dir.parent() + { + // Check if this looks like a virtual environment + if venv_dir.join("pyvenv.cfg").exists() + || venv_dir.join("bin/activate").exists() + || venv_dir.join("Scripts/activate.bat").exists() + { + // Set venvPath and venv at the root level + // This matches the format of a pyrightconfig.json file + if let Some(parent) = venv_dir.parent() { + // Use relative path if the venv is inside the workspace + let venv_path = if parent == adapter.worktree_root_path() { + ".".to_string() + } else { + parent.to_string_lossy().into_owned() + }; + object.insert("venvPath".to_string(), Value::String(venv_path)); + } - if let Some(venv_name) = venv_dir.file_name() { - object.insert( - "venv".to_owned(), - Value::String(venv_name.to_string_lossy().into_owned()), - ); - } + if let Some(venv_name) = venv_dir.file_name() { + object.insert( + "venv".to_owned(), + Value::String(venv_name.to_string_lossy().into_owned()), + ); } } } @@ -397,12 +389,6 @@ impl LspAdapter for PythonLspAdapter { user_settings }) } - fn manifest_name(&self) -> Option { - Some(SharedString::new_static("pyproject.toml").into()) - } - fn workspace_folders_content(&self) -> WorkspaceFoldersContent { - WorkspaceFoldersContent::WorktreeRoot - } } async fn get_cached_server_binary( @@ -725,7 +711,7 @@ impl Default for PythonToolchainProvider { } } -static ENV_PRIORITY_LIST: &'static [PythonEnvironmentKind] = &[ +static ENV_PRIORITY_LIST: &[PythonEnvironmentKind] = &[ // Prioritize non-Conda environments. 
PythonEnvironmentKind::Poetry, PythonEnvironmentKind::Pipenv, @@ -842,7 +828,7 @@ impl ToolchainLister for PythonToolchainProvider { .get_env_var("CONDA_PREFIX".to_string()) .map(|conda_prefix| { let is_match = |exe: &Option| { - exe.as_ref().map_or(false, |e| e.starts_with(&conda_prefix)) + exe.as_ref().is_some_and(|e| e.starts_with(&conda_prefix)) }; match (is_match(&lhs.executable), is_match(&rhs.executable)) { (true, false) => Ordering::Less, @@ -1040,14 +1026,14 @@ const BINARY_DIR: &str = if cfg!(target_os = "windows") { #[async_trait(?Send)] impl LspAdapter for PyLspAdapter { fn name(&self) -> LanguageServerName { - Self::SERVER_NAME.clone() + Self::SERVER_NAME } async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, - toolchains: Arc, - cx: &AsyncApp, + toolchain: Option, + _: &AsyncApp, ) -> Option { if let Some(pylsp_bin) = delegate.which(Self::SERVER_NAME.as_ref()).await { let env = delegate.shell_env().await; @@ -1057,14 +1043,7 @@ impl LspAdapter for PyLspAdapter { arguments: vec![], }) } else { - let venv = toolchains - .active_toolchain( - delegate.worktree_id(), - Arc::from("".as_ref()), - LanguageName::new("Python"), - &mut cx.clone(), - ) - .await?; + let venv = toolchain?; let pylsp_path = Path::new(venv.path.as_ref()).parent()?.join("pylsp"); pylsp_path.exists().then(|| LanguageServerBinary { path: venv.path.to_string().into(), @@ -1211,17 +1190,9 @@ impl LspAdapter for PyLspAdapter { self: Arc, _: &dyn Fs, adapter: &Arc, - toolchains: Arc, + toolchain: Option, cx: &mut AsyncApp, ) -> Result { - let toolchain = toolchains - .active_toolchain( - adapter.worktree_id(), - Arc::from("".as_ref()), - LanguageName::new("Python"), - cx, - ) - .await; cx.update(move |cx| { let mut user_settings = language_server_settings(adapter.as_ref(), &Self::SERVER_NAME, cx) @@ -1282,12 +1253,6 @@ impl LspAdapter for PyLspAdapter { user_settings }) } - fn manifest_name(&self) -> Option { - Some(SharedString::new_static("pyproject.toml").into()) - } - fn workspace_folders_content(&self) -> WorkspaceFoldersContent { - WorkspaceFoldersContent::WorktreeRoot - } } pub(crate) struct BasedPyrightLspAdapter { @@ -1353,7 +1318,7 @@ impl BasedPyrightLspAdapter { #[async_trait(?Send)] impl LspAdapter for BasedPyrightLspAdapter { fn name(&self) -> LanguageServerName { - Self::SERVER_NAME.clone() + Self::SERVER_NAME } async fn initialization_options( @@ -1377,8 +1342,8 @@ impl LspAdapter for BasedPyrightLspAdapter { async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, - toolchains: Arc, - cx: &AsyncApp, + toolchain: Option, + _: &AsyncApp, ) -> Option { if let Some(bin) = delegate.which(Self::BINARY_NAME.as_ref()).await { let env = delegate.shell_env().await; @@ -1388,15 +1353,7 @@ impl LspAdapter for BasedPyrightLspAdapter { arguments: vec!["--stdio".into()], }) } else { - let venv = toolchains - .active_toolchain( - delegate.worktree_id(), - Arc::from("".as_ref()), - LanguageName::new("Python"), - &mut cx.clone(), - ) - .await?; - let path = Path::new(venv.path.as_ref()) + let path = Path::new(toolchain?.path.as_ref()) .parent()? 
.join(Self::BINARY_NAME); path.exists().then(|| LanguageServerBinary { @@ -1543,17 +1500,9 @@ impl LspAdapter for BasedPyrightLspAdapter { self: Arc, _: &dyn Fs, adapter: &Arc, - toolchains: Arc, + toolchain: Option, cx: &mut AsyncApp, ) -> Result { - let toolchain = toolchains - .active_toolchain( - adapter.worktree_id(), - Arc::from("".as_ref()), - LanguageName::new("Python"), - cx, - ) - .await; cx.update(move |cx| { let mut user_settings = language_server_settings(adapter.as_ref(), &Self::SERVER_NAME, cx) @@ -1570,31 +1519,31 @@ impl LspAdapter for BasedPyrightLspAdapter { let interpreter_path = toolchain.path.to_string(); // Detect if this is a virtual environment - if let Some(interpreter_dir) = Path::new(&interpreter_path).parent() { - if let Some(venv_dir) = interpreter_dir.parent() { - // Check if this looks like a virtual environment - if venv_dir.join("pyvenv.cfg").exists() - || venv_dir.join("bin/activate").exists() - || venv_dir.join("Scripts/activate.bat").exists() - { - // Set venvPath and venv at the root level - // This matches the format of a pyrightconfig.json file - if let Some(parent) = venv_dir.parent() { - // Use relative path if the venv is inside the workspace - let venv_path = if parent == adapter.worktree_root_path() { - ".".to_string() - } else { - parent.to_string_lossy().into_owned() - }; - object.insert("venvPath".to_string(), Value::String(venv_path)); - } + if let Some(interpreter_dir) = Path::new(&interpreter_path).parent() + && let Some(venv_dir) = interpreter_dir.parent() + { + // Check if this looks like a virtual environment + if venv_dir.join("pyvenv.cfg").exists() + || venv_dir.join("bin/activate").exists() + || venv_dir.join("Scripts/activate.bat").exists() + { + // Set venvPath and venv at the root level + // This matches the format of a pyrightconfig.json file + if let Some(parent) = venv_dir.parent() { + // Use relative path if the venv is inside the workspace + let venv_path = if parent == adapter.worktree_root_path() { + ".".to_string() + } else { + parent.to_string_lossy().into_owned() + }; + object.insert("venvPath".to_string(), Value::String(venv_path)); + } - if let Some(venv_name) = venv_dir.file_name() { - object.insert( - "venv".to_owned(), - Value::String(venv_name.to_string_lossy().into_owned()), - ); - } + if let Some(venv_name) = venv_dir.file_name() { + object.insert( + "venv".to_owned(), + Value::String(venv_name.to_string_lossy().into_owned()), + ); } } } @@ -1621,14 +1570,6 @@ impl LspAdapter for BasedPyrightLspAdapter { user_settings }) } - - fn manifest_name(&self) -> Option { - Some(SharedString::new_static("pyproject.toml").into()) - } - - fn workspace_folders_content(&self) -> WorkspaceFoldersContent { - WorkspaceFoldersContent::WorktreeRoot - } } #[cfg(test)] diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index b52b1e7d551429de5968ab4b1c7535f98f910d8f..c6c735714854da15194cad6d15c2e3bd124fe308 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -23,7 +23,7 @@ use std::{ sync::{Arc, LazyLock}, }; use task::{TaskTemplate, TaskTemplates, TaskVariables, VariableName}; -use util::fs::make_file_executable; +use util::fs::{make_file_executable, remove_matching}; use util::merge_json_value_into; use util::{ResultExt, maybe}; @@ -106,17 +106,13 @@ impl ManifestProvider for CargoManifestProvider { #[async_trait(?Send)] impl LspAdapter for RustLspAdapter { fn name(&self) -> LanguageServerName { - SERVER_NAME.clone() - } - - fn manifest_name(&self) -> Option { - 
Some(SharedString::new_static("Cargo.toml").into()) + SERVER_NAME } async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, - _: Arc, + _: Option, _: &AsyncApp, ) -> Option { let path = delegate.which("rust-analyzer".as_ref()).await?; @@ -162,13 +158,13 @@ impl LspAdapter for RustLspAdapter { let asset_name = Self::build_asset_name(); let asset = release .assets - .iter() + .into_iter() .find(|asset| asset.name == asset_name) .with_context(|| format!("no asset found matching `{asset_name:?}`"))?; Ok(Box::new(GitHubLspBinaryVersion { name: release.tag_name, - url: asset.browser_download_url.clone(), - digest: asset.digest.clone(), + url: asset.browser_download_url, + digest: asset.digest, })) } @@ -178,11 +174,11 @@ impl LspAdapter for RustLspAdapter { container_dir: PathBuf, delegate: &dyn LspAdapterDelegate, ) -> Result { - let GitHubLspBinaryVersion { name, url, digest } = - &*version.downcast::().unwrap(); - let expected_digest = digest - .as_ref() - .and_then(|digest| digest.strip_prefix("sha256:")); + let GitHubLspBinaryVersion { + name, + url, + digest: expected_digest, + } = *version.downcast::().unwrap(); let destination_path = container_dir.join(format!("rust-analyzer-{name}")); let server_path = match Self::GITHUB_ASSET_KIND { AssetKind::TarGz | AssetKind::Gz => destination_path.clone(), // Tar and gzip extract in place. @@ -213,7 +209,7 @@ impl LspAdapter for RustLspAdapter { }) }; if let (Some(actual_digest), Some(expected_digest)) = - (&metadata.digest, expected_digest) + (&metadata.digest, &expected_digest) { if actual_digest == expected_digest { if validity_check().await.is_ok() { @@ -229,20 +225,20 @@ impl LspAdapter for RustLspAdapter { } } - _ = fs::remove_dir_all(&destination_path).await; download_server_binary( delegate, - url, - expected_digest, + &url, + expected_digest.as_deref(), &destination_path, Self::GITHUB_ASSET_KIND, ) .await?; make_file_executable(&server_path).await?; + remove_matching(&container_dir, |path| path != destination_path).await; GithubBinaryMetadata::write_to_file( &GithubBinaryMetadata { metadata_version: 1, - digest: expected_digest.map(ToString::to_string), + digest: expected_digest, }, &metadata_path, ) @@ -407,7 +403,7 @@ impl LspAdapter for RustLspAdapter { } else if completion .detail .as_ref() - .map_or(false, |detail| detail.starts_with("macro_rules! ")) + .is_some_and(|detail| detail.starts_with("macro_rules! 
")) { let text = completion.label.clone(); let len = text.len(); @@ -500,7 +496,7 @@ impl LspAdapter for RustLspAdapter { let enable_lsp_tasks = ProjectSettings::get_global(cx) .lsp .get(&SERVER_NAME) - .map_or(false, |s| s.enable_lsp_tasks); + .is_some_and(|s| s.enable_lsp_tasks); if enable_lsp_tasks { let experimental = json!({ "runnables": { @@ -585,7 +581,7 @@ impl ContextProvider for RustContextProvider { if let (Some(path), Some(stem)) = (&local_abs_path, task_variables.get(&VariableName::Stem)) { - let fragment = test_fragment(&variables, &path, stem); + let fragment = test_fragment(&variables, path, stem); variables.insert(RUST_TEST_FRAGMENT_TASK_VARIABLE, fragment); }; if let Some(test_name) = @@ -602,16 +598,14 @@ impl ContextProvider for RustContextProvider { if let Some(path) = local_abs_path .as_deref() .and_then(|local_abs_path| local_abs_path.parent()) - { - if let Some(package_name) = + && let Some(package_name) = human_readable_package_name(path, project_env.as_ref()).await - { - variables.insert(RUST_PACKAGE_TASK_VARIABLE.clone(), package_name); - } + { + variables.insert(RUST_PACKAGE_TASK_VARIABLE.clone(), package_name); } if let Some(path) = local_abs_path.as_ref() && let Some((target, manifest_path)) = - target_info_from_abs_path(&path, project_env.as_ref()).await + target_info_from_abs_path(path, project_env.as_ref()).await { if let Some(target) = target { variables.extend(TaskVariables::from_iter([ @@ -665,7 +659,7 @@ impl ContextProvider for RustContextProvider { .variables .get(CUSTOM_TARGET_DIR) .cloned(); - let run_task_args = if let Some(package_to_run) = package_to_run.clone() { + let run_task_args = if let Some(package_to_run) = package_to_run { vec!["run".into(), "-p".into(), package_to_run] } else { vec!["run".into()] @@ -1023,8 +1017,14 @@ async fn get_cached_server_binary(container_dir: PathBuf) -> Option path, // Tar and gzip extract in place. 
+ AssetKind::Zip => path.join("rust-analyzer.exe"), // zip contains a .exe + }; + anyhow::Ok(LanguageServerBinary { - path: last.context("no cached binary")?, + path, env: None, arguments: Default::default(), }) @@ -1568,7 +1568,7 @@ mod tests { let found = test_fragment( &TaskVariables::from_iter(variables.into_iter().map(|(k, v)| (k, v.to_owned()))), path, - &path.file_stem().unwrap().to_str().unwrap(), + path.file_stem().unwrap().to_str().unwrap(), ); assert_eq!(expected, found); } diff --git a/crates/languages/src/tailwind.rs b/crates/languages/src/tailwind.rs index a7edbb148cd807cf404a80aa6552c211252ec25b..47eb25405339b81b3c458b2443d4af77da883cf0 100644 --- a/crates/languages/src/tailwind.rs +++ b/crates/languages/src/tailwind.rs @@ -3,9 +3,9 @@ use async_trait::async_trait; use collections::HashMap; use futures::StreamExt; use gpui::AsyncApp; -use language::{LanguageName, LanguageToolchainStore, LspAdapter, LspAdapterDelegate}; +use language::{LanguageName, LspAdapter, LspAdapterDelegate, Toolchain}; use lsp::{LanguageServerBinary, LanguageServerName}; -use node_runtime::NodeRuntime; +use node_runtime::{NodeRuntime, VersionStrategy}; use project::{Fs, lsp_store::language_server_settings}; use serde_json::{Value, json}; use smol::fs; @@ -44,13 +44,13 @@ impl TailwindLspAdapter { #[async_trait(?Send)] impl LspAdapter for TailwindLspAdapter { fn name(&self) -> LanguageServerName { - Self::SERVER_NAME.clone() + Self::SERVER_NAME } async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, - _: Arc, + _: Option, _: &AsyncApp, ) -> Option { let path = delegate.which(Self::SERVER_NAME.as_ref()).await?; @@ -108,7 +108,12 @@ impl LspAdapter for TailwindLspAdapter { let should_install_language_server = self .node - .should_install_npm_package(Self::PACKAGE_NAME, &server_path, &container_dir, &version) + .should_install_npm_package( + Self::PACKAGE_NAME, + &server_path, + container_dir, + VersionStrategy::Latest(version), + ) .await; if should_install_language_server { @@ -150,7 +155,7 @@ impl LspAdapter for TailwindLspAdapter { self: Arc, _: &dyn Fs, delegate: &Arc, - _: Arc, + _: Option, cx: &mut AsyncApp, ) -> Result { let mut tailwind_user_settings = cx.update(|cx| { diff --git a/crates/languages/src/tsx/outline.scm b/crates/languages/src/tsx/outline.scm index 5dafe791e493d03f6a73fa7c155ebb03072dc4d5..f4261b9697d376f517b717bc942387190e0b6dde 100644 --- a/crates/languages/src/tsx/outline.scm +++ b/crates/languages/src/tsx/outline.scm @@ -34,12 +34,16 @@ (export_statement (lexical_declaration ["let" "const"] @context + ; Multiple names may be exported - @item is on the declarator to keep + ; ranges distinct. (variable_declarator name: (_) @name) @item)) (program (lexical_declaration ["let" "const"] @context + ; Multiple names may be defined - @item is on the declarator to keep + ; ranges distinct. 
(variable_declarator name: (_) @name) @item)) diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index f976b6261480a65106c510369a107c8c078e5a33..77cf1a64f1f5586e727ca2b7ccc55dbe8a6183cf 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -7,10 +7,10 @@ use gpui::{App, AppContext, AsyncApp, Task}; use http_client::github::{AssetKind, GitHubLspBinaryVersion, build_asset_url}; use language::{ ContextLocation, ContextProvider, File, LanguageName, LanguageToolchainStore, LspAdapter, - LspAdapterDelegate, + LspAdapterDelegate, Toolchain, }; use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerName}; -use node_runtime::NodeRuntime; +use node_runtime::{NodeRuntime, VersionStrategy}; use project::{Fs, lsp_store::language_server_settings}; use serde_json::{Value, json}; use smol::{fs, lock::RwLock, stream::StreamExt}; @@ -341,10 +341,10 @@ async fn detect_package_manager( fs: Arc, package_json_data: Option, ) -> &'static str { - if let Some(package_json_data) = package_json_data { - if let Some(package_manager) = package_json_data.package_manager { - return package_manager; - } + if let Some(package_json_data) = package_json_data + && let Some(package_manager) = package_json_data.package_manager + { + return package_manager; } if fs.is_file(&worktree_root.join("pnpm-lock.yaml")).await { return "pnpm"; @@ -557,7 +557,7 @@ struct TypeScriptVersions { #[async_trait(?Send)] impl LspAdapter for TypeScriptLspAdapter { fn name(&self) -> LanguageServerName { - Self::SERVER_NAME.clone() + Self::SERVER_NAME } async fn fetch_latest_server_version( @@ -587,8 +587,8 @@ impl LspAdapter for TypeScriptLspAdapter { .should_install_npm_package( Self::PACKAGE_NAME, &server_path, - &container_dir, - version.typescript_version.as_str(), + container_dir, + VersionStrategy::Latest(version.typescript_version.as_str()), ) .await; @@ -722,7 +722,7 @@ impl LspAdapter for TypeScriptLspAdapter { self: Arc, _: &dyn Fs, delegate: &Arc, - _: Arc, + _: Option, cx: &mut AsyncApp, ) -> Result { let override_options = cx.update(|cx| { @@ -822,7 +822,7 @@ impl LspAdapter for EsLintLspAdapter { self: Arc, _: &dyn Fs, delegate: &Arc, - _: Arc, + _: Option, cx: &mut AsyncApp, ) -> Result { let workspace_root = delegate.worktree_root_path(); @@ -879,7 +879,7 @@ impl LspAdapter for EsLintLspAdapter { } fn name(&self) -> LanguageServerName { - Self::SERVER_NAME.clone() + Self::SERVER_NAME } async fn fetch_latest_server_version( @@ -910,7 +910,7 @@ impl LspAdapter for EsLintLspAdapter { let server_path = destination_path.join(Self::SERVER_PATH); if fs::metadata(&server_path).await.is_err() { - remove_matching(&container_dir, |entry| entry != destination_path).await; + remove_matching(&container_dir, |_| true).await; download_server_binary( delegate, diff --git a/crates/languages/src/typescript/outline.scm b/crates/languages/src/typescript/outline.scm index 5dafe791e493d03f6a73fa7c155ebb03072dc4d5..f4261b9697d376f517b717bc942387190e0b6dde 100644 --- a/crates/languages/src/typescript/outline.scm +++ b/crates/languages/src/typescript/outline.scm @@ -34,12 +34,16 @@ (export_statement (lexical_declaration ["let" "const"] @context + ; Multiple names may be exported - @item is on the declarator to keep + ; ranges distinct. (variable_declarator name: (_) @name) @item)) (program (lexical_declaration ["let" "const"] @context + ; Multiple names may be defined - @item is on the declarator to keep + ; ranges distinct. 
(variable_declarator name: (_) @name) @item)) diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index 33751f733e5b3f81e7fb145de8af6633673a4e0f..f7152b0b5df8e2249c43b1f2ffa2b490bf001961 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -2,9 +2,9 @@ use anyhow::Result; use async_trait::async_trait; use collections::HashMap; use gpui::AsyncApp; -use language::{LanguageName, LanguageToolchainStore, LspAdapter, LspAdapterDelegate}; +use language::{LanguageName, LspAdapter, LspAdapterDelegate, Toolchain}; use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerName}; -use node_runtime::NodeRuntime; +use node_runtime::{NodeRuntime, VersionStrategy}; use project::{Fs, lsp_store::language_server_settings}; use serde_json::Value; use std::{ @@ -67,7 +67,7 @@ const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("vtsls"); #[async_trait(?Send)] impl LspAdapter for VtslsLspAdapter { fn name(&self) -> LanguageServerName { - SERVER_NAME.clone() + SERVER_NAME } async fn fetch_latest_server_version( @@ -86,7 +86,7 @@ impl LspAdapter for VtslsLspAdapter { async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, - _: Arc, + _: Option, _: &AsyncApp, ) -> Option { let env = delegate.shell_env().await; @@ -115,7 +115,7 @@ impl LspAdapter for VtslsLspAdapter { Self::PACKAGE_NAME, &server_path, &container_dir, - &latest_version.server_version, + VersionStrategy::Latest(&latest_version.server_version), ) .await { @@ -128,7 +128,7 @@ impl LspAdapter for VtslsLspAdapter { Self::TYPESCRIPT_PACKAGE_NAME, &container_dir.join(Self::TYPESCRIPT_TSDK_PATH), &container_dir, - &latest_version.typescript_version, + VersionStrategy::Latest(&latest_version.typescript_version), ) .await { @@ -211,7 +211,7 @@ impl LspAdapter for VtslsLspAdapter { self: Arc, fs: &dyn Fs, delegate: &Arc, - _: Arc, + _: Option, cx: &mut AsyncApp, ) -> Result { let tsdk_path = Self::tsdk_path(fs, delegate).await; diff --git a/crates/languages/src/yaml.rs b/crates/languages/src/yaml.rs index 815605d5242c9068ae908435d7ac751cad61d2dc..b9197b12ae85af5ee2a9aa2b5807a4f0410a1dda 100644 --- a/crates/languages/src/yaml.rs +++ b/crates/languages/src/yaml.rs @@ -2,11 +2,9 @@ use anyhow::{Context as _, Result}; use async_trait::async_trait; use futures::StreamExt; use gpui::AsyncApp; -use language::{ - LanguageToolchainStore, LspAdapter, LspAdapterDelegate, language_settings::AllLanguageSettings, -}; +use language::{LspAdapter, LspAdapterDelegate, Toolchain, language_settings::AllLanguageSettings}; use lsp::{LanguageServerBinary, LanguageServerName}; -use node_runtime::NodeRuntime; +use node_runtime::{NodeRuntime, VersionStrategy}; use project::{Fs, lsp_store::language_server_settings}; use serde_json::Value; use settings::{Settings, SettingsLocation}; @@ -40,7 +38,7 @@ impl YamlLspAdapter { #[async_trait(?Send)] impl LspAdapter for YamlLspAdapter { fn name(&self) -> LanguageServerName { - Self::SERVER_NAME.clone() + Self::SERVER_NAME } async fn fetch_latest_server_version( @@ -57,7 +55,7 @@ impl LspAdapter for YamlLspAdapter { async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, - _: Arc, + _: Option, _: &AsyncApp, ) -> Option { let path = delegate.which(Self::SERVER_NAME.as_ref()).await?; @@ -104,7 +102,12 @@ impl LspAdapter for YamlLspAdapter { let should_install_language_server = self .node - .should_install_npm_package(Self::PACKAGE_NAME, &server_path, &container_dir, &version) + .should_install_npm_package( + Self::PACKAGE_NAME, + 
&server_path, + container_dir, + VersionStrategy::Latest(version), + ) .await; if should_install_language_server { @@ -130,7 +133,7 @@ impl LspAdapter for YamlLspAdapter { self: Arc, _: &dyn Fs, delegate: &Arc, - _: Arc, + _: Option, cx: &mut AsyncApp, ) -> Result { let location = SettingsLocation { diff --git a/crates/languages/src/yaml/overrides.scm b/crates/languages/src/yaml/overrides.scm new file mode 100644 index 0000000000000000000000000000000000000000..9503051a62080eb2fdfca3416ef9e5286464dd17 --- /dev/null +++ b/crates/languages/src/yaml/overrides.scm @@ -0,0 +1,5 @@ +(comment) @comment.inclusive +[ + (single_quote_scalar) + (double_quote_scalar) +] @string diff --git a/crates/livekit_client/Cargo.toml b/crates/livekit_client/Cargo.toml index 821fd5d39006b517d264687d7fb9a25fb570d0c2..3575325ac04a06ffc577438c6a323cf411cee859 100644 --- a/crates/livekit_client/Cargo.toml +++ b/crates/livekit_client/Cargo.toml @@ -25,6 +25,7 @@ async-trait.workspace = true collections.workspace = true cpal.workspace = true futures.workspace = true +audio.workspace = true gpui = { workspace = true, features = ["screen-capture", "x11", "wayland", "windows-manifest"] } gpui_tokio.workspace = true http_client_tls.workspace = true @@ -35,10 +36,13 @@ nanoid.workspace = true parking_lot.workspace = true postage.workspace = true smallvec.workspace = true +settings.workspace = true tokio-tungstenite.workspace = true util.workspace = true workspace-hack.workspace = true +rodio = { workspace = true, features = ["wav_output"] } + [target.'cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))'.dependencies] libwebrtc = { rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d", git = "https://github.com/zed-industries/livekit-rust-sdks" } livekit = { rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d", git = "https://github.com/zed-industries/livekit-rust-sdks", features = [ diff --git a/crates/livekit_client/examples/test_app.rs b/crates/livekit_client/examples/test_app.rs index e1d01df534e142502abb5f17392e19299f8ae158..75806429905e6bcbfcc17f25a29007f18e78757b 100644 --- a/crates/livekit_client/examples/test_app.rs +++ b/crates/livekit_client/examples/test_app.rs @@ -159,14 +159,14 @@ impl LivekitWindow { if output .audio_output_stream .as_ref() - .map_or(false, |(track, _)| track.sid() == unpublish_sid) + .is_some_and(|(track, _)| track.sid() == unpublish_sid) { output.audio_output_stream.take(); } if output .screen_share_output_view .as_ref() - .map_or(false, |(track, _)| track.sid() == unpublish_sid) + .is_some_and(|(track, _)| track.sid() == unpublish_sid) { output.screen_share_output_view.take(); } @@ -183,7 +183,7 @@ impl LivekitWindow { match track { livekit_client::RemoteTrack::Audio(track) => { output.audio_output_stream = Some(( - publication.clone(), + publication, room.play_remote_audio_track(&track, cx).unwrap(), )); } diff --git a/crates/livekit_client/src/lib.rs b/crates/livekit_client/src/lib.rs index 149859fdc8ecd8533332c9462a090adb5496f100..055aa3704e06f25a21c69294343539289d8acb49 100644 --- a/crates/livekit_client/src/lib.rs +++ b/crates/livekit_client/src/lib.rs @@ -1,7 +1,13 @@ +use anyhow::Context as _; use collections::HashMap; mod remote_video_track_view; +use cpal::traits::HostTrait as _; pub use remote_video_track_view::{RemoteVideoTrackView, RemoteVideoTrackViewEvent}; +use rodio::DeviceTrait as _; + +mod record; +pub use record::CaptureInput; #[cfg(not(any( test, @@ -18,6 +24,11 @@ mod livekit_client; )))] pub use livekit_client::*; +// If you need proper LSP in 
livekit_client you've got to comment +// - the cfg blocks above +// - the mods: mock_client & test and their conditional blocks +// - the pub use mock_client::* and their conditional blocks + #[cfg(any( test, feature = "test-support", @@ -168,3 +179,59 @@ pub enum RoomEvent { Reconnecting, Reconnected, } + +pub(crate) fn default_device( + input: bool, +) -> anyhow::Result<(cpal::Device, cpal::SupportedStreamConfig)> { + let device; + let config; + if input { + device = cpal::default_host() + .default_input_device() + .context("no audio input device available")?; + config = device + .default_input_config() + .context("failed to get default input config")?; + } else { + device = cpal::default_host() + .default_output_device() + .context("no audio output device available")?; + config = device + .default_output_config() + .context("failed to get default output config")?; + } + Ok((device, config)) +} + +pub(crate) fn get_sample_data( + sample_format: cpal::SampleFormat, + data: &cpal::Data, +) -> anyhow::Result> { + match sample_format { + cpal::SampleFormat::I8 => Ok(convert_sample_data::(data)), + cpal::SampleFormat::I16 => Ok(data.as_slice::().unwrap().to_vec()), + cpal::SampleFormat::I24 => Ok(convert_sample_data::(data)), + cpal::SampleFormat::I32 => Ok(convert_sample_data::(data)), + cpal::SampleFormat::I64 => Ok(convert_sample_data::(data)), + cpal::SampleFormat::U8 => Ok(convert_sample_data::(data)), + cpal::SampleFormat::U16 => Ok(convert_sample_data::(data)), + cpal::SampleFormat::U32 => Ok(convert_sample_data::(data)), + cpal::SampleFormat::U64 => Ok(convert_sample_data::(data)), + cpal::SampleFormat::F32 => Ok(convert_sample_data::(data)), + cpal::SampleFormat::F64 => Ok(convert_sample_data::(data)), + _ => anyhow::bail!("Unsupported sample format"), + } +} + +pub(crate) fn convert_sample_data< + TSource: cpal::SizedSample, + TDest: cpal::SizedSample + cpal::FromSample, +>( + data: &cpal::Data, +) -> Vec { + data.as_slice::() + .unwrap() + .iter() + .map(|e| e.to_sample::()) + .collect() +} diff --git a/crates/livekit_client/src/livekit_client.rs b/crates/livekit_client/src/livekit_client.rs index 8f0ac1a456aea1ac32879e121961e87930035dba..0751b014f42b2743efc54431a1bac7762ebb7884 100644 --- a/crates/livekit_client/src/livekit_client.rs +++ b/crates/livekit_client/src/livekit_client.rs @@ -1,11 +1,14 @@ use std::sync::Arc; use anyhow::{Context as _, Result}; +use audio::AudioSettings; use collections::HashMap; use futures::{SinkExt, channel::mpsc}; use gpui::{App, AsyncApp, ScreenCaptureSource, ScreenCaptureStream, Task}; use gpui_tokio::Tokio; +use log::info; use playback::capture_local_video_track; +use settings::Settings; mod playback; @@ -123,9 +126,14 @@ impl Room { pub fn play_remote_audio_track( &self, track: &RemoteAudioTrack, - _cx: &App, + cx: &mut App, ) -> Result { - Ok(self.playback.play_remote_audio_track(&track.0)) + if AudioSettings::get_global(cx).rodio_audio { + info!("Using experimental.rodio_audio audio pipeline"); + playback::play_remote_audio_track(&track.0, cx) + } else { + Ok(self.playback.play_remote_audio_track(&track.0)) + } } } diff --git a/crates/livekit_client/src/livekit_client/playback.rs b/crates/livekit_client/src/livekit_client/playback.rs index f14e156125f6da815fe24aabd798e53c6c3e82b8..d6b64dbacaad018b664eb6d196106a80e83229a1 100644 --- a/crates/livekit_client/src/livekit_client/playback.rs +++ b/crates/livekit_client/src/livekit_client/playback.rs @@ -1,7 +1,6 @@ use anyhow::{Context as _, Result}; -use cpal::traits::{DeviceTrait, HostTrait, 
StreamTrait as _}; -use cpal::{Data, FromSample, I24, SampleFormat, SizedSample}; +use cpal::traits::{DeviceTrait, StreamTrait as _}; use futures::channel::mpsc::UnboundedSender; use futures::{Stream, StreamExt as _}; use gpui::{ @@ -19,13 +18,16 @@ use livekit::webrtc::{ video_stream::native::NativeVideoStream, }; use parking_lot::Mutex; +use rodio::Source; use std::cell::RefCell; use std::sync::Weak; -use std::sync::atomic::{self, AtomicI32}; +use std::sync::atomic::{AtomicBool, AtomicI32, Ordering}; use std::time::Duration; use std::{borrow::Cow, collections::VecDeque, sync::Arc, thread}; use util::{ResultExt as _, maybe}; +mod source; + pub(crate) struct AudioStack { executor: BackgroundExecutor, apm: Arc>, @@ -41,6 +43,29 @@ pub(crate) struct AudioStack { const SAMPLE_RATE: u32 = 48000; const NUM_CHANNELS: u32 = 2; +pub(crate) fn play_remote_audio_track( + track: &livekit::track::RemoteAudioTrack, + cx: &mut gpui::App, +) -> Result { + let stop_handle = Arc::new(AtomicBool::new(false)); + let stop_handle_clone = stop_handle.clone(); + let stream = source::LiveKitStream::new(cx.background_executor(), track) + .stoppable() + .periodic_access(Duration::from_millis(50), move |s| { + if stop_handle.load(Ordering::Relaxed) { + s.stop(); + } + }); + audio::Audio::play_source(stream, cx).context("Could not play audio")?; + + let on_drop = util::defer(move || { + stop_handle_clone.store(true, Ordering::Relaxed); + }); + Ok(AudioStream::Output { + _drop: Box::new(on_drop), + }) +} + impl AudioStack { pub(crate) fn new(executor: BackgroundExecutor) -> Self { let apm = Arc::new(Mutex::new(apm::AudioProcessingModule::new( @@ -62,7 +87,7 @@ impl AudioStack { ) -> AudioStream { let output_task = self.start_output(); - let next_ssrc = self.next_ssrc.fetch_add(1, atomic::Ordering::Relaxed); + let next_ssrc = self.next_ssrc.fetch_add(1, Ordering::Relaxed); let source = AudioMixerSource { ssrc: next_ssrc, sample_rate: SAMPLE_RATE, @@ -98,6 +123,23 @@ impl AudioStack { } } + fn start_output(&self) -> Arc> { + if let Some(task) = self._output_task.borrow().upgrade() { + return task; + } + let task = Arc::new(self.executor.spawn({ + let apm = self.apm.clone(); + let mixer = self.mixer.clone(); + async move { + Self::play_output(apm, mixer, SAMPLE_RATE, NUM_CHANNELS) + .await + .log_err(); + } + })); + *self._output_task.borrow_mut() = Arc::downgrade(&task); + task + } + pub(crate) fn capture_local_microphone_track( &self, ) -> Result<(crate::LocalAudioTrack, AudioStream)> { @@ -118,7 +160,6 @@ impl AudioStack { let (frame_tx, mut frame_rx) = futures::channel::mpsc::unbounded(); let transmit_task = self.executor.spawn({ - let source = source.clone(); async move { while let Some(frame) = frame_rx.next().await { source.capture_frame(&frame).await.log_err(); @@ -133,29 +174,12 @@ impl AudioStack { drop(transmit_task); drop(capture_task); }); - return Ok(( + Ok(( super::LocalAudioTrack(track), AudioStream::Output { _drop: Box::new(on_drop), }, - )); - } - - fn start_output(&self) -> Arc> { - if let Some(task) = self._output_task.borrow().upgrade() { - return task; - } - let task = Arc::new(self.executor.spawn({ - let apm = self.apm.clone(); - let mixer = self.mixer.clone(); - async move { - Self::play_output(apm, mixer, SAMPLE_RATE, NUM_CHANNELS) - .await - .log_err(); - } - })); - *self._output_task.borrow_mut() = Arc::downgrade(&task); - task + )) } async fn play_output( @@ -166,7 +190,7 @@ impl AudioStack { ) -> Result<()> { loop { let mut device_change_listener = DeviceChangeListener::new(false)?; - let 
(output_device, output_config) = default_device(false)?; + let (output_device, output_config) = crate::default_device(false)?; let (end_on_drop_tx, end_on_drop_rx) = std::sync::mpsc::channel::<()>(); let mixer = mixer.clone(); let apm = apm.clone(); @@ -238,7 +262,7 @@ impl AudioStack { ) -> Result<()> { loop { let mut device_change_listener = DeviceChangeListener::new(true)?; - let (device, config) = default_device(true)?; + let (device, config) = crate::default_device(true)?; let (end_on_drop_tx, end_on_drop_rx) = std::sync::mpsc::channel::<()>(); let apm = apm.clone(); let frame_tx = frame_tx.clone(); @@ -262,7 +286,7 @@ impl AudioStack { config.sample_format(), move |data, _: &_| { let data = - Self::get_sample_data(config.sample_format(), data).log_err(); + crate::get_sample_data(config.sample_format(), data).log_err(); let Some(data) = data else { return; }; @@ -320,33 +344,6 @@ impl AudioStack { drop(end_on_drop_tx) } } - - fn get_sample_data(sample_format: SampleFormat, data: &Data) -> Result> { - match sample_format { - SampleFormat::I8 => Ok(Self::convert_sample_data::(data)), - SampleFormat::I16 => Ok(data.as_slice::().unwrap().to_vec()), - SampleFormat::I24 => Ok(Self::convert_sample_data::(data)), - SampleFormat::I32 => Ok(Self::convert_sample_data::(data)), - SampleFormat::I64 => Ok(Self::convert_sample_data::(data)), - SampleFormat::U8 => Ok(Self::convert_sample_data::(data)), - SampleFormat::U16 => Ok(Self::convert_sample_data::(data)), - SampleFormat::U32 => Ok(Self::convert_sample_data::(data)), - SampleFormat::U64 => Ok(Self::convert_sample_data::(data)), - SampleFormat::F32 => Ok(Self::convert_sample_data::(data)), - SampleFormat::F64 => Ok(Self::convert_sample_data::(data)), - _ => anyhow::bail!("Unsupported sample format"), - } - } - - fn convert_sample_data>( - data: &Data, - ) -> Vec { - data.as_slice::() - .unwrap() - .iter() - .map(|e| e.to_sample::()) - .collect() - } } use super::LocalVideoTrack; @@ -393,27 +390,6 @@ pub(crate) async fn capture_local_video_track( )) } -fn default_device(input: bool) -> Result<(cpal::Device, cpal::SupportedStreamConfig)> { - let device; - let config; - if input { - device = cpal::default_host() - .default_input_device() - .context("no audio input device available")?; - config = device - .default_input_config() - .context("failed to get default input config")?; - } else { - device = cpal::default_host() - .default_output_device() - .context("no audio output device available")?; - config = device - .default_output_config() - .context("failed to get default output config")?; - } - Ok((device, config)) -} - #[derive(Clone)] struct AudioMixerSource { ssrc: i32, diff --git a/crates/livekit_client/src/livekit_client/playback/source.rs b/crates/livekit_client/src/livekit_client/playback/source.rs new file mode 100644 index 0000000000000000000000000000000000000000..021640247ddc8da17025dc8bf852003ead468852 --- /dev/null +++ b/crates/livekit_client/src/livekit_client/playback/source.rs @@ -0,0 +1,67 @@ +use futures::StreamExt; +use libwebrtc::{audio_stream::native::NativeAudioStream, prelude::AudioFrame}; +use livekit::track::RemoteAudioTrack; +use rodio::{Source, buffer::SamplesBuffer, conversions::SampleTypeConverter}; + +use crate::livekit_client::playback::{NUM_CHANNELS, SAMPLE_RATE}; + +fn frame_to_samplesbuffer(frame: AudioFrame) -> SamplesBuffer { + let samples = frame.data.iter().copied(); + let samples = SampleTypeConverter::<_, _>::new(samples); + let samples: Vec = samples.collect(); + SamplesBuffer::new(frame.num_channels as 
u16, frame.sample_rate, samples) +} + +pub struct LiveKitStream { + // shared_buffer: SharedBuffer, + inner: rodio::queue::SourcesQueueOutput, + _receiver_task: gpui::Task<()>, +} + +impl LiveKitStream { + pub fn new(executor: &gpui::BackgroundExecutor, track: &RemoteAudioTrack) -> Self { + let mut stream = + NativeAudioStream::new(track.rtc_track(), SAMPLE_RATE as i32, NUM_CHANNELS as i32); + let (queue_input, queue_output) = rodio::queue::queue(true); + // spawn rtc stream + let receiver_task = executor.spawn({ + async move { + while let Some(frame) = stream.next().await { + let samples = frame_to_samplesbuffer(frame); + queue_input.append(samples); + } + } + }); + + LiveKitStream { + _receiver_task: receiver_task, + inner: queue_output, + } + } +} + +impl Iterator for LiveKitStream { + type Item = rodio::Sample; + + fn next(&mut self) -> Option { + self.inner.next() + } +} + +impl Source for LiveKitStream { + fn current_span_len(&self) -> Option { + self.inner.current_span_len() + } + + fn channels(&self) -> rodio::ChannelCount { + self.inner.channels() + } + + fn sample_rate(&self) -> rodio::SampleRate { + self.inner.sample_rate() + } + + fn total_duration(&self) -> Option { + self.inner.total_duration() + } +} diff --git a/crates/livekit_client/src/record.rs b/crates/livekit_client/src/record.rs new file mode 100644 index 0000000000000000000000000000000000000000..925c0d4c67f91bcb147a9fb8d0d99b0aa1ab1810 --- /dev/null +++ b/crates/livekit_client/src/record.rs @@ -0,0 +1,91 @@ +use std::{ + env, + path::{Path, PathBuf}, + sync::{Arc, Mutex}, + time::Duration, +}; + +use anyhow::{Context, Result}; +use cpal::traits::{DeviceTrait, StreamTrait}; +use rodio::{buffer::SamplesBuffer, conversions::SampleTypeConverter}; +use util::ResultExt; + +pub struct CaptureInput { + pub name: String, + config: cpal::SupportedStreamConfig, + samples: Arc>>, + _stream: cpal::Stream, +} + +impl CaptureInput { + pub fn start() -> anyhow::Result { + let (device, config) = crate::default_device(true)?; + let name = device.name().unwrap_or("".to_string()); + log::info!("Using microphone: {}", name); + + let samples = Arc::new(Mutex::new(Vec::new())); + let stream = start_capture(device, config.clone(), samples.clone())?; + + Ok(Self { + name, + _stream: stream, + config, + samples, + }) + } + + pub fn finish(self) -> Result { + let name = self.name; + let mut path = env::current_dir().context("Could not get current dir")?; + path.push(&format!("test_recording_{name}.wav")); + log::info!("Test recording written to: {}", path.display()); + write_out(self.samples, self.config, &path)?; + Ok(path) + } +} + +fn start_capture( + device: cpal::Device, + config: cpal::SupportedStreamConfig, + samples: Arc>>, +) -> Result { + let stream = device + .build_input_stream_raw( + &config.config(), + config.sample_format(), + move |data, _: &_| { + let data = crate::get_sample_data(config.sample_format(), data).log_err(); + let Some(data) = data else { + return; + }; + samples + .try_lock() + .expect("Only locked after stream ends") + .extend_from_slice(&data); + }, + |err| log::error!("error capturing audio track: {:?}", err), + Some(Duration::from_millis(100)), + ) + .context("failed to build input stream")?; + + stream.play()?; + Ok(stream) +} + +fn write_out( + samples: Arc>>, + config: cpal::SupportedStreamConfig, + path: &Path, +) -> Result<()> { + let samples = std::mem::take( + &mut *samples + .try_lock() + .expect("Stream has ended, callback cant hold the lock"), + ); + let samples: Vec = SampleTypeConverter::<_, 
f32>::new(samples.into_iter()).collect(); + let mut samples = SamplesBuffer::new(config.channels(), config.sample_rate().0, samples); + match rodio::output_to_wav(&mut samples, path) { + Ok(_) => Ok(()), + Err(e) => Err(anyhow::anyhow!("Failed to write wav file: {}", e)), + } +} diff --git a/crates/livekit_client/src/test.rs b/crates/livekit_client/src/test.rs index e02c4d876fbe3411cf1730f3d97aaf8db3e208b6..873e0222d013c20c5f4aff2a263b925c7d21aff6 100644 --- a/crates/livekit_client/src/test.rs +++ b/crates/livekit_client/src/test.rs @@ -421,7 +421,7 @@ impl TestServer { track_sid: &TrackSid, muted: bool, ) -> Result<()> { - let claims = livekit_api::token::validate(&token, &self.secret_key)?; + let claims = livekit_api::token::validate(token, &self.secret_key)?; let room_name = claims.video.room.unwrap(); let identity = ParticipantIdentity(claims.sub.unwrap().to_string()); let mut server_rooms = self.rooms.lock(); @@ -475,7 +475,7 @@ impl TestServer { } pub(crate) fn is_track_muted(&self, token: &str, track_sid: &TrackSid) -> Option { - let claims = livekit_api::token::validate(&token, &self.secret_key).ok()?; + let claims = livekit_api::token::validate(token, &self.secret_key).ok()?; let room_name = claims.video.room.unwrap(); let mut server_rooms = self.rooms.lock(); @@ -736,14 +736,14 @@ impl Room { impl Drop for RoomState { fn drop(&mut self) { - if self.connection_state == ConnectionState::Connected { - if let Ok(server) = TestServer::get(&self.url) { - let executor = server.executor.clone(); - let token = self.token.clone(); - executor - .spawn(async move { server.leave_room(token).await.ok() }) - .detach(); - } + if self.connection_state == ConnectionState::Connected + && let Ok(server) = TestServer::get(&self.url) + { + let executor = server.executor.clone(); + let token = self.token.clone(); + executor + .spawn(async move { server.leave_room(token).await.ok() }) + .detach(); } } } diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index a92787cd3e74d3a8b1e18f1d4cd41d4cf300d484..942225d09837c206f54aa324f9b58ec214f92ba2 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -45,7 +45,7 @@ use util::{ConnectionResult, ResultExt, TryFutureExt, redact}; const JSON_RPC_VERSION: &str = "2.0"; const CONTENT_LEN_HEADER: &str = "Content-Length: "; -const LSP_REQUEST_TIMEOUT: Duration = Duration::from_secs(60 * 2); +pub const LSP_REQUEST_TIMEOUT: Duration = Duration::from_secs(60 * 2); const SERVER_SHUTDOWN_TIMEOUT: Duration = Duration::from_secs(5); type NotificationHandler = Box, Value, &mut AsyncApp)>; @@ -318,6 +318,8 @@ impl LanguageServer { } else { root_path.parent().unwrap_or_else(|| Path::new("/")) }; + let root_uri = Url::from_file_path(&working_dir) + .map_err(|()| anyhow!("{working_dir:?} is not a valid URI"))?; log::info!( "starting language server process. 
binary path: {:?}, working directory: {:?}, args: {:?}", @@ -345,8 +347,6 @@ impl LanguageServer { let stdin = server.stdin.take().unwrap(); let stdout = server.stdout.take().unwrap(); let stderr = server.stderr.take().unwrap(); - let root_uri = Url::from_file_path(&working_dir) - .map_err(|()| anyhow!("{working_dir:?} is not a valid URI"))?; let server = Self::new_internal( server_id, server_name, @@ -651,7 +651,7 @@ impl LanguageServer { capabilities: ClientCapabilities { general: Some(GeneralClientCapabilities { position_encodings: Some(vec![PositionEncodingKind::UTF16]), - ..Default::default() + ..GeneralClientCapabilities::default() }), workspace: Some(WorkspaceClientCapabilities { configuration: Some(true), @@ -665,6 +665,7 @@ impl LanguageServer { workspace_folders: Some(true), symbol: Some(WorkspaceSymbolClientCapabilities { resolve_support: None, + dynamic_registration: Some(true), ..WorkspaceSymbolClientCapabilities::default() }), inlay_hint: Some(InlayHintWorkspaceClientCapabilities { @@ -688,21 +689,21 @@ impl LanguageServer { ..WorkspaceEditClientCapabilities::default() }), file_operations: Some(WorkspaceFileOperationsClientCapabilities { - dynamic_registration: Some(false), + dynamic_registration: Some(true), did_rename: Some(true), will_rename: Some(true), - ..Default::default() + ..WorkspaceFileOperationsClientCapabilities::default() }), apply_edit: Some(true), execute_command: Some(ExecuteCommandClientCapabilities { - dynamic_registration: Some(false), + dynamic_registration: Some(true), }), - ..Default::default() + ..WorkspaceClientCapabilities::default() }), text_document: Some(TextDocumentClientCapabilities { definition: Some(GotoCapability { link_support: Some(true), - dynamic_registration: None, + dynamic_registration: Some(true), }), code_action: Some(CodeActionClientCapabilities { code_action_literal_support: Some(CodeActionLiteralSupport { @@ -725,7 +726,8 @@ impl LanguageServer { "command".to_string(), ], }), - ..Default::default() + dynamic_registration: Some(true), + ..CodeActionClientCapabilities::default() }), completion: Some(CompletionClientCapabilities { completion_item: Some(CompletionItemCapability { @@ -751,7 +753,7 @@ impl LanguageServer { MarkupKind::Markdown, MarkupKind::PlainText, ]), - ..Default::default() + ..CompletionItemCapability::default() }), insert_text_mode: Some(InsertTextMode::ADJUST_INDENTATION), completion_list: Some(CompletionListCapability { @@ -764,18 +766,20 @@ impl LanguageServer { ]), }), context_support: Some(true), - ..Default::default() + dynamic_registration: Some(true), + ..CompletionClientCapabilities::default() }), rename: Some(RenameClientCapabilities { prepare_support: Some(true), prepare_support_default_behavior: Some( PrepareSupportDefaultBehavior::IDENTIFIER, ), - ..Default::default() + dynamic_registration: Some(true), + ..RenameClientCapabilities::default() }), hover: Some(HoverClientCapabilities { content_format: Some(vec![MarkupKind::Markdown]), - dynamic_registration: None, + dynamic_registration: Some(true), }), inlay_hint: Some(InlayHintClientCapabilities { resolve_support: Some(InlayHintResolveClientCapabilities { @@ -787,7 +791,7 @@ impl LanguageServer { "label.command".to_string(), ], }), - dynamic_registration: Some(false), + dynamic_registration: Some(true), }), publish_diagnostics: Some(PublishDiagnosticsClientCapabilities { related_information: Some(true), @@ -818,26 +822,29 @@ impl LanguageServer { }), active_parameter_support: Some(true), }), + dynamic_registration: Some(true), 
..SignatureHelpClientCapabilities::default() }), synchronization: Some(TextDocumentSyncClientCapabilities { did_save: Some(true), + dynamic_registration: Some(true), ..TextDocumentSyncClientCapabilities::default() }), code_lens: Some(CodeLensClientCapabilities { - dynamic_registration: Some(false), + dynamic_registration: Some(true), }), document_symbol: Some(DocumentSymbolClientCapabilities { hierarchical_document_symbol_support: Some(true), + dynamic_registration: Some(true), ..DocumentSymbolClientCapabilities::default() }), diagnostic: Some(DiagnosticClientCapabilities { - dynamic_registration: Some(false), + dynamic_registration: Some(true), related_document_support: Some(true), }) .filter(|_| pull_diagnostics), color_provider: Some(DocumentColorClientCapabilities { - dynamic_registration: Some(false), + dynamic_registration: Some(true), }), ..TextDocumentClientCapabilities::default() }), @@ -850,7 +857,7 @@ impl LanguageServer { show_message: Some(ShowMessageRequestClientCapabilities { message_action_item: None, }), - ..Default::default() + ..WindowClientCapabilities::default() }), }, trace: None, @@ -862,8 +869,7 @@ impl LanguageServer { } }), locale: None, - - ..Default::default() + ..InitializeParams::default() } } @@ -1672,7 +1678,7 @@ impl LanguageServer { workspace_symbol_provider: Some(OneOf::Left(true)), implementation_provider: Some(ImplementationProviderCapability::Simple(true)), type_definition_provider: Some(TypeDefinitionProviderCapability::Simple(true)), - ..Default::default() + ..ServerCapabilities::default() } } } diff --git a/crates/markdown/examples/markdown.rs b/crates/markdown/examples/markdown.rs index bf685bd9acfe9f678454dffc538ca66e3ca0910d..c651c7921d4d92562e68bfea6cb1954132c215bc 100644 --- a/crates/markdown/examples/markdown.rs +++ b/crates/markdown/examples/markdown.rs @@ -77,16 +77,16 @@ impl Render for MarkdownExample { fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { let markdown_style = MarkdownStyle { base_text_style: gpui::TextStyle { - font_family: "Zed Plex Sans".into(), + font_family: ".ZedSans".into(), color: cx.theme().colors().terminal_ansi_black, ..Default::default() }, code_block: StyleRefinement::default() - .font_family("Zed Plex Mono") + .font_family(".ZedMono") .m(rems(1.)) .bg(rgb(0xAAAAAAA)), inline_code: gpui::TextStyleRefinement { - font_family: Some("Zed Mono".into()), + font_family: Some(".ZedMono".into()), color: Some(cx.theme().colors().editor_foreground), background_color: Some(cx.theme().colors().editor_background), ..Default::default() diff --git a/crates/markdown/examples/markdown_as_child.rs b/crates/markdown/examples/markdown_as_child.rs index 862b657c8c50c7adc88642f1af21a4c075ff77f2..16c198601a31707602aea3dd250e3958c4c8f0fb 100644 --- a/crates/markdown/examples/markdown_as_child.rs +++ b/crates/markdown/examples/markdown_as_child.rs @@ -30,7 +30,7 @@ pub fn main() { let node_runtime = NodeRuntime::unavailable(); let language_registry = Arc::new(LanguageRegistry::new(cx.background_executor().clone())); - languages::init(language_registry.clone(), node_runtime, cx); + languages::init(language_registry, node_runtime, cx); theme::init(LoadThemes::JustBase, cx); Assets.load_fonts(cx).unwrap(); diff --git a/crates/markdown/src/markdown.rs b/crates/markdown/src/markdown.rs index dba4bc64b191b1e189cd114b2f66cd408c2feece..755506bd126daff19b54888d76021844c5097a1c 100644 --- a/crates/markdown/src/markdown.rs +++ b/crates/markdown/src/markdown.rs @@ -340,27 +340,26 @@ impl Markdown { } for (range, event) 
in &events {
-            if let MarkdownEvent::Start(MarkdownTag::Image { dest_url, .. }) = event {
-                if let Some(data_url) = dest_url.strip_prefix("data:") {
-                    let Some((mime_info, data)) = data_url.split_once(',') else {
-                        continue;
-                    };
-                    let Some((mime_type, encoding)) = mime_info.split_once(';') else {
-                        continue;
-                    };
-                    let Some(format) = ImageFormat::from_mime_type(mime_type) else {
-                        continue;
-                    };
-                    let is_base64 = encoding == "base64";
-                    if is_base64 {
-                        if let Some(bytes) = base64::prelude::BASE64_STANDARD
-                            .decode(data)
-                            .log_with_level(Level::Debug)
-                        {
-                            let image = Arc::new(Image::from_bytes(format, bytes));
-                            images_by_source_offset.insert(range.start, image);
-                        }
-                    }
+            if let MarkdownEvent::Start(MarkdownTag::Image { dest_url, .. }) = event
+                && let Some(data_url) = dest_url.strip_prefix("data:")
+            {
+                let Some((mime_info, data)) = data_url.split_once(',') else {
+                    continue;
+                };
+                let Some((mime_type, encoding)) = mime_info.split_once(';') else {
+                    continue;
+                };
+                let Some(format) = ImageFormat::from_mime_type(mime_type) else {
+                    continue;
+                };
+                let is_base64 = encoding == "base64";
+                if is_base64
+                    && let Some(bytes) = base64::prelude::BASE64_STANDARD
+                        .decode(data)
+                        .log_with_level(Level::Debug)
+                {
+                    let image = Arc::new(Image::from_bytes(format, bytes));
+                    images_by_source_offset.insert(range.start, image);
                 }
             }
         }
@@ -659,13 +658,13 @@ impl MarkdownElement {
         let rendered_text = rendered_text.clone();
         move |markdown, event: &MouseUpEvent, phase, window, cx| {
             if phase.bubble() {
-                if let Some(pressed_link) = markdown.pressed_link.take() {
-                    if Some(&pressed_link) == rendered_text.link_for_position(event.position) {
-                        if let Some(open_url) = on_open_url.as_ref() {
-                            open_url(pressed_link.destination_url, window, cx);
-                        } else {
-                            cx.open_url(&pressed_link.destination_url);
-                        }
+                if let Some(pressed_link) = markdown.pressed_link.take()
+                    && Some(&pressed_link) == rendered_text.link_for_position(event.position)
+                {
+                    if let Some(open_url) = on_open_url.as_ref() {
+                        open_url(pressed_link.destination_url, window, cx);
+                    } else {
+                        cx.open_url(&pressed_link.destination_url);
                     }
                 }
             } else if markdown.selection.pending {
@@ -758,10 +757,10 @@ impl Element for MarkdownElement {
         let mut current_img_block_range: Option> = None;
         for (range, event) in parsed_markdown.events.iter() {
             // Skip alt text for images that rendered
-            if let Some(current_img_block_range) = &current_img_block_range {
-                if current_img_block_range.end > range.end {
-                    continue;
-                }
+            if let Some(current_img_block_range) = &current_img_block_range
+                && current_img_block_range.end > range.end
+            {
+                continue;
             }
             match event {
@@ -875,7 +874,7 @@ impl Element for MarkdownElement {
                    (CodeBlockRenderer::Custom { render, ..
}, _) => { let parent_container = render( kind, - &parsed_markdown, + parsed_markdown, range.clone(), metadata.clone(), window, @@ -1084,7 +1083,15 @@ impl Element for MarkdownElement { self.markdown.clone(), cx, ); - el.child(div().absolute().top_1().right_1().w_5().child(codeblock)) + el.child( + h_flex() + .w_5() + .absolute() + .top_1() + .right_1() + .justify_center() + .child(codeblock), + ) }); } @@ -1312,11 +1319,11 @@ fn render_copy_code_block_button( }, ) .icon_color(Color::Muted) + .icon_size(IconSize::Small) .shape(ui::IconButtonShape::Square) .tooltip(Tooltip::text("Copy Code")) .on_click({ - let id = id.clone(); - let markdown = markdown.clone(); + let markdown = markdown; move |_event, _window, cx| { let id = id.clone(); markdown.update(cx, |this, cx| { @@ -1693,10 +1700,10 @@ impl RenderedText { while let Some(line) = lines.next() { let line_bounds = line.layout.bounds(); if position.y > line_bounds.bottom() { - if let Some(next_line) = lines.peek() { - if position.y < next_line.layout.bounds().top() { - return Err(line.source_end); - } + if let Some(next_line) = lines.peek() + && position.y < next_line.layout.bounds().top() + { + return Err(line.source_end); } continue; diff --git a/crates/markdown/src/parser.rs b/crates/markdown/src/parser.rs index 1035335ccb40f63133c727b5a5be8930d42b818f..3720e5b1ef5f61f0a209ac5617119de61ed05517 100644 --- a/crates/markdown/src/parser.rs +++ b/crates/markdown/src/parser.rs @@ -247,7 +247,7 @@ pub fn parse_markdown( events.push(event_for( text, range.source_range.start..range.source_range.start + prefix_len, - &head, + head, )); range.parsed = CowStr::Boxed(tail.into()); range.merged_range.start += prefix_len; diff --git a/crates/markdown_preview/src/markdown_parser.rs b/crates/markdown_preview/src/markdown_parser.rs index 27691f2ecffadb7a7df1e9647e7d1d6487135974..b51b98a2ed64c72d76a8ca6e7316b6866bdcd9fe 100644 --- a/crates/markdown_preview/src/markdown_parser.rs +++ b/crates/markdown_preview/src/markdown_parser.rs @@ -76,22 +76,22 @@ impl<'a> MarkdownParser<'a> { if self.eof() || (steps + self.cursor) >= self.tokens.len() { return self.tokens.last(); } - return self.tokens.get(self.cursor + steps); + self.tokens.get(self.cursor + steps) } fn previous(&self) -> Option<&(Event<'_>, Range)> { if self.cursor == 0 || self.cursor > self.tokens.len() { return None; } - return self.tokens.get(self.cursor - 1); + self.tokens.get(self.cursor - 1) } fn current(&self) -> Option<&(Event<'_>, Range)> { - return self.peek(0); + self.peek(0) } fn current_event(&self) -> Option<&Event<'_>> { - return self.current().map(|(event, _)| event); + self.current().map(|(event, _)| event) } fn is_text_like(event: &Event) -> bool { @@ -178,7 +178,6 @@ impl<'a> MarkdownParser<'a> { _ => None, }, Event::Rule => { - let source_range = source_range.clone(); self.cursor += 1; Some(vec![ParsedMarkdownElement::HorizontalRule(source_range)]) } @@ -300,13 +299,12 @@ impl<'a> MarkdownParser<'a> { if style != MarkdownHighlightStyle::default() && last_run_len < text.len() { let mut new_highlight = true; - if let Some((last_range, last_style)) = highlights.last_mut() { - if last_range.end == last_run_len - && last_style == &MarkdownHighlight::Style(style.clone()) - { - last_range.end = text.len(); - new_highlight = false; - } + if let Some((last_range, last_style)) = highlights.last_mut() + && last_range.end == last_run_len + && last_style == &MarkdownHighlight::Style(style.clone()) + { + last_range.end = text.len(); + new_highlight = false; } if new_highlight { 
highlights.push(( @@ -402,7 +400,7 @@ impl<'a> MarkdownParser<'a> { } if !text.is_empty() { markdown_text_like.push(MarkdownParagraphChunk::Text(ParsedMarkdownText { - source_range: source_range.clone(), + source_range, contents: text, highlights, regions, @@ -421,7 +419,7 @@ impl<'a> MarkdownParser<'a> { self.cursor += 1; ParsedMarkdownHeading { - source_range: source_range.clone(), + source_range, level: match level { pulldown_cmark::HeadingLevel::H1 => HeadingLevel::H1, pulldown_cmark::HeadingLevel::H2 => HeadingLevel::H2, @@ -579,10 +577,10 @@ impl<'a> MarkdownParser<'a> { } } else { let block = self.parse_block().await; - if let Some(block) = block { - if let Some(list_item) = items_stack.last_mut() { - list_item.content.extend(block); - } + if let Some(block) = block + && let Some(list_item) = items_stack.last_mut() + { + list_item.content.extend(block); } } } diff --git a/crates/markdown_preview/src/markdown_preview_view.rs b/crates/markdown_preview/src/markdown_preview_view.rs index a0c8819991d68336a306af85a4dd709353222fa1..1121d64655f6c7e02f0b0d621605c9ba1aae7cde 100644 --- a/crates/markdown_preview/src/markdown_preview_view.rs +++ b/crates/markdown_preview/src/markdown_preview_view.rs @@ -115,8 +115,7 @@ impl MarkdownPreviewView { pane.activate_item(existing_follow_view_idx, true, true, window, cx); }); } else { - let view = - Self::create_following_markdown_view(workspace, editor.clone(), window, cx); + let view = Self::create_following_markdown_view(workspace, editor, window, cx); workspace.active_pane().update(cx, |pane, cx| { pane.add_item(Box::new(view.clone()), true, true, None, window, cx) }); @@ -151,10 +150,9 @@ impl MarkdownPreviewView { if let Some(editor) = workspace .active_item(cx) .and_then(|item| item.act_as::(cx)) + && Self::is_markdown_file(&editor, cx) { - if Self::is_markdown_file(&editor, cx) { - return Some(editor); - } + return Some(editor); } None } @@ -243,32 +241,30 @@ impl MarkdownPreviewView { window: &mut Window, cx: &mut Context, ) { - if let Some(item) = active_item { - if item.item_id() != cx.entity_id() { - if let Some(editor) = item.act_as::(cx) { - if Self::is_markdown_file(&editor, cx) { - self.set_editor(editor, window, cx); - } - } - } + if let Some(item) = active_item + && item.item_id() != cx.entity_id() + && let Some(editor) = item.act_as::(cx) + && Self::is_markdown_file(&editor, cx) + { + self.set_editor(editor, window, cx); } } pub fn is_markdown_file(editor: &Entity, cx: &mut Context) -> bool { let buffer = editor.read(cx).buffer().read(cx); - if let Some(buffer) = buffer.as_singleton() { - if let Some(language) = buffer.read(cx).language() { - return language.name() == "Markdown".into(); - } + if let Some(buffer) = buffer.as_singleton() + && let Some(language) = buffer.read(cx).language() + { + return language.name() == "Markdown".into(); } false } fn set_editor(&mut self, editor: Entity, window: &mut Window, cx: &mut Context) { - if let Some(active) = &self.active_editor { - if active.editor == editor { - return; - } + if let Some(active) = &self.active_editor + && active.editor == editor + { + return; } let subscription = cx.subscribe_in( @@ -552,21 +548,20 @@ impl Render for MarkdownPreviewView { .group("markdown-block") .on_click(cx.listener( move |this, event: &ClickEvent, window, cx| { - if event.click_count() == 2 { - if let Some(source_range) = this + if event.click_count() == 2 + && let Some(source_range) = this .contents .as_ref() .and_then(|c| c.children.get(ix)) .and_then(|block: &ParsedMarkdownElement| { 
block.source_range() }) - { - this.move_cursor_to_block( - window, - cx, - source_range.start..source_range.start, - ); - } + { + this.move_cursor_to_block( + window, + cx, + source_range.start..source_range.start, + ); } }, )) diff --git a/crates/markdown_preview/src/markdown_renderer.rs b/crates/markdown_preview/src/markdown_renderer.rs index 37d2ca21105566f1e2e3271f49c75a3ce1d7846b..b0b10e927cb3bbc4f0b8366cc77b091c9df773d2 100644 --- a/crates/markdown_preview/src/markdown_renderer.rs +++ b/crates/markdown_preview/src/markdown_renderer.rs @@ -111,11 +111,10 @@ impl RenderContext { /// buffer font size changes. The callees of this function should be reimplemented to use real /// relative sizing once that is implemented in GPUI pub fn scaled_rems(&self, rems: f32) -> Rems { - return self - .buffer_text_style + self.buffer_text_style .font_size .to_rems(self.window_rem_size) - .mul(rems); + .mul(rems) } /// This ensures that children inside of block quotes @@ -459,13 +458,13 @@ fn render_markdown_table(parsed: &ParsedMarkdownTable, cx: &mut RenderContext) - let mut max_lengths: Vec = vec![0; parsed.header.children.len()]; for (index, cell) in parsed.header.children.iter().enumerate() { - let length = paragraph_len(&cell); + let length = paragraph_len(cell); max_lengths[index] = length; } for row in &parsed.body { for (index, cell) in row.children.iter().enumerate() { - let length = paragraph_len(&cell); + let length = paragraph_len(cell); if length > max_lengths[index] { max_lengths[index] = length; diff --git a/crates/migrator/src/migrations/m_2025_01_02/settings.rs b/crates/migrator/src/migrations/m_2025_01_02/settings.rs index 3ce85e6b2611b69dfaac5479ee3404eeda9c0ebc..a35b1ebd2e9d8e2c658de0623b7c2e8377662b18 100644 --- a/crates/migrator/src/migrations/m_2025_01_02/settings.rs +++ b/crates/migrator/src/migrations/m_2025_01_02/settings.rs @@ -20,14 +20,14 @@ fn replace_deprecated_settings_values( .nodes_for_capture_index(parent_object_capture_ix) .next()? .byte_range(); - let parent_object_name = contents.get(parent_object_range.clone())?; + let parent_object_name = contents.get(parent_object_range)?; let setting_name_ix = query.capture_index_for_name("setting_name")?; let setting_name_range = mat .nodes_for_capture_index(setting_name_ix) .next()? 
.byte_range(); - let setting_name = contents.get(setting_name_range.clone())?; + let setting_name = contents.get(setting_name_range)?; let setting_value_ix = query.capture_index_for_name("setting_value")?; let setting_value_range = mat diff --git a/crates/migrator/src/migrations/m_2025_01_29/keymap.rs b/crates/migrator/src/migrations/m_2025_01_29/keymap.rs index 646af8f63dc90b6ebe3faef9432eecc54140b438..eed2c46e0816452af6813ae699eab6cec1d65eec 100644 --- a/crates/migrator/src/migrations/m_2025_01_29/keymap.rs +++ b/crates/migrator/src/migrations/m_2025_01_29/keymap.rs @@ -242,22 +242,22 @@ static STRING_REPLACE: LazyLock> = LazyLock::new(|| { "inline_completion::ToggleMenu", "edit_prediction::ToggleMenu", ), - ("editor::NextEditPrediction", "editor::NextEditPrediction"), + ("editor::NextInlineCompletion", "editor::NextEditPrediction"), ( - "editor::PreviousEditPrediction", + "editor::PreviousInlineCompletion", "editor::PreviousEditPrediction", ), ( - "editor::AcceptPartialEditPrediction", + "editor::AcceptPartialInlineCompletion", "editor::AcceptPartialEditPrediction", ), - ("editor::ShowEditPrediction", "editor::ShowEditPrediction"), + ("editor::ShowInlineCompletion", "editor::ShowEditPrediction"), ( - "editor::AcceptEditPrediction", + "editor::AcceptInlineCompletion", "editor::AcceptEditPrediction", ), ( - "editor::ToggleEditPredictions", + "editor::ToggleInlineCompletions", "editor::ToggleEditPrediction", ), ]) @@ -279,7 +279,7 @@ fn rename_context_key( new_predicate = new_predicate.replace(old_key, new_key); } if new_predicate != old_predicate { - Some((context_predicate_range, new_predicate.to_string())) + Some((context_predicate_range, new_predicate)) } else { None } diff --git a/crates/migrator/src/migrations/m_2025_01_29/settings.rs b/crates/migrator/src/migrations/m_2025_01_29/settings.rs index 8d3261676b731d00e3dd85f3f5d94737931d74fe..46cfe2f178f1e4416cb404f26b5b77b55663aa29 100644 --- a/crates/migrator/src/migrations/m_2025_01_29/settings.rs +++ b/crates/migrator/src/migrations/m_2025_01_29/settings.rs @@ -57,7 +57,7 @@ pub fn replace_edit_prediction_provider_setting( .nodes_for_capture_index(parent_object_capture_ix) .next()? .byte_range(); - let parent_object_name = contents.get(parent_object_range.clone())?; + let parent_object_name = contents.get(parent_object_range)?; let setting_name_ix = query.capture_index_for_name("setting_name")?; let setting_range = mat diff --git a/crates/migrator/src/migrations/m_2025_01_30/settings.rs b/crates/migrator/src/migrations/m_2025_01_30/settings.rs index 23a3243b827b7d44e673208e56858b6cd2e8f2b7..2d763e4722cb2119f0b2f982b5841aab37e55c12 100644 --- a/crates/migrator/src/migrations/m_2025_01_30/settings.rs +++ b/crates/migrator/src/migrations/m_2025_01_30/settings.rs @@ -25,7 +25,7 @@ fn replace_tab_close_button_setting_key( .nodes_for_capture_index(parent_object_capture_ix) .next()? .byte_range(); - let parent_object_name = contents.get(parent_object_range.clone())?; + let parent_object_name = contents.get(parent_object_range)?; let setting_name_ix = query.capture_index_for_name("setting_name")?; let setting_range = mat @@ -51,14 +51,14 @@ fn replace_tab_close_button_setting_value( .nodes_for_capture_index(parent_object_capture_ix) .next()? .byte_range(); - let parent_object_name = contents.get(parent_object_range.clone())?; + let parent_object_name = contents.get(parent_object_range)?; let setting_name_ix = query.capture_index_for_name("setting_name")?; let setting_name_range = mat .nodes_for_capture_index(setting_name_ix) .next()? 
.byte_range(); - let setting_name = contents.get(setting_name_range.clone())?; + let setting_name = contents.get(setting_name_range)?; let setting_value_ix = query.capture_index_for_name("setting_value")?; let setting_value_range = mat diff --git a/crates/migrator/src/migrations/m_2025_03_29/settings.rs b/crates/migrator/src/migrations/m_2025_03_29/settings.rs index 47f65b407da2b7079fb68a4877275339d6309433..8f83d8e39ea050de0ec9291199804f0e62dab392 100644 --- a/crates/migrator/src/migrations/m_2025_03_29/settings.rs +++ b/crates/migrator/src/migrations/m_2025_03_29/settings.rs @@ -19,7 +19,7 @@ fn replace_setting_value( .nodes_for_capture_index(setting_capture_ix) .next()? .byte_range(); - let setting_name = contents.get(setting_name_range.clone())?; + let setting_name = contents.get(setting_name_range)?; if setting_name != "hide_mouse_while_typing" { return None; diff --git a/crates/migrator/src/migrations/m_2025_05_05/settings.rs b/crates/migrator/src/migrations/m_2025_05_05/settings.rs index 88c6c338d18bc9c648a6c09e8fe1755bc3f77cd9..77da1b9a077b4acc2e6df6d47713f8e15f0fd090 100644 --- a/crates/migrator/src/migrations/m_2025_05_05/settings.rs +++ b/crates/migrator/src/migrations/m_2025_05_05/settings.rs @@ -24,7 +24,7 @@ fn rename_assistant( .nodes_for_capture_index(key_capture_ix) .next()? .byte_range(); - return Some((key_range, "agent".to_string())); + Some((key_range, "agent".to_string())) } fn rename_edit_prediction_assistant( @@ -37,5 +37,5 @@ fn rename_edit_prediction_assistant( .nodes_for_capture_index(key_capture_ix) .next()? .byte_range(); - return Some((key_range, "enabled_in_text_threads".to_string())); + Some((key_range, "enabled_in_text_threads".to_string())) } diff --git a/crates/migrator/src/migrations/m_2025_05_29/settings.rs b/crates/migrator/src/migrations/m_2025_05_29/settings.rs index 56d72836fa396810db2a220f57b8144c939a872a..37ef0e45cc0730c9861ca4362a4b93f025002c6d 100644 --- a/crates/migrator/src/migrations/m_2025_05_29/settings.rs +++ b/crates/migrator/src/migrations/m_2025_05_29/settings.rs @@ -19,7 +19,7 @@ fn replace_preferred_completion_mode_value( .nodes_for_capture_index(parent_object_capture_ix) .next()? .byte_range(); - let parent_object_name = contents.get(parent_object_range.clone())?; + let parent_object_name = contents.get(parent_object_range)?; if parent_object_name != "agent" { return None; @@ -30,7 +30,7 @@ fn replace_preferred_completion_mode_value( .nodes_for_capture_index(setting_name_capture_ix) .next()? 
.byte_range(); - let setting_name = contents.get(setting_name_range.clone())?; + let setting_name = contents.get(setting_name_range)?; if setting_name != "preferred_completion_mode" { return None; diff --git a/crates/migrator/src/migrations/m_2025_06_16/settings.rs b/crates/migrator/src/migrations/m_2025_06_16/settings.rs index cce407e21b81bf9064c1261c142b216b622712a8..cd79eae2048ca9809b720b7913eba12b3e6cb1ce 100644 --- a/crates/migrator/src/migrations/m_2025_06_16/settings.rs +++ b/crates/migrator/src/migrations/m_2025_06_16/settings.rs @@ -40,20 +40,20 @@ fn migrate_context_server_settings( // Parse the server settings to check what keys it contains let mut cursor = server_settings.walk(); for child in server_settings.children(&mut cursor) { - if child.kind() == "pair" { - if let Some(key_node) = child.child_by_field_name("key") { - if let (None, Some(quote_content)) = (column, key_node.child(0)) { - column = Some(quote_content.start_position().column); - } - if let Some(string_content) = key_node.child(1) { - let key = &contents[string_content.byte_range()]; - match key { - // If it already has a source key, don't modify it - "source" => return None, - "command" => has_command = true, - "settings" => has_settings = true, - _ => other_keys += 1, - } + if child.kind() == "pair" + && let Some(key_node) = child.child_by_field_name("key") + { + if let (None, Some(quote_content)) = (column, key_node.child(0)) { + column = Some(quote_content.start_position().column); + } + if let Some(string_content) = key_node.child(1) { + let key = &contents[string_content.byte_range()]; + match key { + // If it already has a source key, don't modify it + "source" => return None, + "command" => has_command = true, + "settings" => has_settings = true, + _ => other_keys += 1, } } } diff --git a/crates/migrator/src/migrations/m_2025_06_25/settings.rs b/crates/migrator/src/migrations/m_2025_06_25/settings.rs index 5dd6c3093a43b00acff3db6c1e316a3fc6664175..2bf7658eeb9036c0b1d08d2af446c0aba788d402 100644 --- a/crates/migrator/src/migrations/m_2025_06_25/settings.rs +++ b/crates/migrator/src/migrations/m_2025_06_25/settings.rs @@ -84,10 +84,10 @@ fn remove_pair_with_whitespace( } } else { // If no next sibling, check if there's a comma before - if let Some(prev_sibling) = pair_node.prev_sibling() { - if prev_sibling.kind() == "," { - range_to_remove.start = prev_sibling.start_byte(); - } + if let Some(prev_sibling) = pair_node.prev_sibling() + && prev_sibling.kind() == "," + { + range_to_remove.start = prev_sibling.start_byte(); } } @@ -123,10 +123,10 @@ fn remove_pair_with_whitespace( // Also check if we need to include trailing whitespace up to the next line let text_after = &contents[range_to_remove.end..]; - if let Some(newline_pos) = text_after.find('\n') { - if text_after[..newline_pos].chars().all(|c| c.is_whitespace()) { - range_to_remove.end += newline_pos + 1; - } + if let Some(newline_pos) = text_after.find('\n') + && text_after[..newline_pos].chars().all(|c| c.is_whitespace()) + { + range_to_remove.end += newline_pos + 1; } Some((range_to_remove, String::new())) diff --git a/crates/migrator/src/migrations/m_2025_06_27/settings.rs b/crates/migrator/src/migrations/m_2025_06_27/settings.rs index 6156308fcec05dfb10b5b258d31077e5d4b09adc..e3e951b1a69e39d19e93a152a264750caf51a81e 100644 --- a/crates/migrator/src/migrations/m_2025_06_27/settings.rs +++ b/crates/migrator/src/migrations/m_2025_06_27/settings.rs @@ -56,19 +56,18 @@ fn flatten_context_server_command( let mut cursor = command_object.walk(); for 
child in command_object.children(&mut cursor) { - if child.kind() == "pair" { - if let Some(key_node) = child.child_by_field_name("key") { - if let Some(string_content) = key_node.child(1) { - let key = &contents[string_content.byte_range()]; - if let Some(value_node) = child.child_by_field_name("value") { - let value_range = value_node.byte_range(); - match key { - "path" => path_value = Some(&contents[value_range]), - "args" => args_value = Some(&contents[value_range]), - "env" => env_value = Some(&contents[value_range]), - _ => {} - } - } + if child.kind() == "pair" + && let Some(key_node) = child.child_by_field_name("key") + && let Some(string_content) = key_node.child(1) + { + let key = &contents[string_content.byte_range()]; + if let Some(value_node) = child.child_by_field_name("value") { + let value_range = value_node.byte_range(); + match key { + "path" => path_value = Some(&contents[value_range]), + "args" => args_value = Some(&contents[value_range]), + "env" => env_value = Some(&contents[value_range]), + _ => {} } } } diff --git a/crates/migrator/src/migrator.rs b/crates/migrator/src/migrator.rs index b425f7f1d5dc691ed1501d712ab72556412f7eb6..2180a049d03daf5fcd2a60e1f1f7ddd0013c7d1f 100644 --- a/crates/migrator/src/migrator.rs +++ b/crates/migrator/src/migrator.rs @@ -28,7 +28,7 @@ fn migrate(text: &str, patterns: MigrationPatterns, query: &Query) -> Result Result Result> { pub fn migrate_edit_prediction_provider_settings(text: &str) -> Result> { migrate( - &text, + text, &[( SETTINGS_NESTED_KEY_VALUE_PATTERN, migrations::m_2025_01_29::replace_edit_prediction_provider_setting, @@ -293,12 +293,12 @@ mod tests { use super::*; fn assert_migrate_keymap(input: &str, output: Option<&str>) { - let migrated = migrate_keymap(&input).unwrap(); + let migrated = migrate_keymap(input).unwrap(); pretty_assertions::assert_eq!(migrated.as_deref(), output); } fn assert_migrate_settings(input: &str, output: Option<&str>) { - let migrated = migrate_settings(&input).unwrap(); + let migrated = migrate_settings(input).unwrap(); pretty_assertions::assert_eq!(migrated.as_deref(), output); } diff --git a/crates/mistral/src/mistral.rs b/crates/mistral/src/mistral.rs index c466a598a0ca509654ec501614169c24c2094409..5b4d05377c7132f47828aa6afafbb5c850e940a8 100644 --- a/crates/mistral/src/mistral.rs +++ b/crates/mistral/src/mistral.rs @@ -86,6 +86,7 @@ pub enum Model { max_completion_tokens: Option, supports_tools: Option, supports_images: Option, + supports_thinking: Option, }, } @@ -214,6 +215,16 @@ impl Model { } => supports_images.unwrap_or(false), } } + + pub fn supports_thinking(&self) -> bool { + match self { + Self::MagistralMediumLatest | Self::MagistralSmallLatest => true, + Self::Custom { + supports_thinking, .. 
+ } => supports_thinking.unwrap_or(false), + _ => false, + } + } } #[derive(Debug, Serialize, Deserialize)] @@ -288,7 +299,9 @@ pub enum ToolChoice { #[serde(tag = "role", rename_all = "lowercase")] pub enum RequestMessage { Assistant { - content: Option, + #[serde(flatten)] + #[serde(default, skip_serializing_if = "Option::is_none")] + content: Option, #[serde(default, skip_serializing_if = "Vec::is_empty")] tool_calls: Vec, }, @@ -297,7 +310,8 @@ pub enum RequestMessage { content: MessageContent, }, System { - content: String, + #[serde(flatten)] + content: MessageContent, }, Tool { content: String, @@ -305,7 +319,7 @@ pub enum RequestMessage { }, } -#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] +#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)] #[serde(untagged)] pub enum MessageContent { #[serde(rename = "content")] @@ -346,11 +360,21 @@ impl MessageContent { } } -#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] +#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)] #[serde(tag = "type", rename_all = "snake_case")] pub enum MessagePart { Text { text: String }, ImageUrl { image_url: String }, + Thinking { thinking: Vec }, +} + +// Backwards-compatibility alias for provider code that refers to ContentPart +pub type ContentPart = MessagePart; + +#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum ThinkingPart { + Text { text: String }, } #[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] @@ -418,24 +442,30 @@ pub struct StreamChoice { pub finish_reason: Option, } -#[derive(Serialize, Deserialize, Debug)] +#[derive(Serialize, Deserialize, Debug, Clone)] pub struct StreamDelta { pub role: Option, - pub content: Option, #[serde(default, skip_serializing_if = "Option::is_none")] - pub tool_calls: Option>, + pub content: Option, #[serde(default, skip_serializing_if = "Option::is_none")] - pub reasoning_content: Option, + pub tool_calls: Option>, } -#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] +#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)] +#[serde(untagged)] +pub enum MessageContentDelta { + Text(String), + Parts(Vec), +} + +#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)] pub struct ToolCallChunk { pub index: usize, pub id: Option, pub function: Option, } -#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] +#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)] pub struct FunctionChunk { pub name: Option, pub arguments: Option, diff --git a/crates/multi_buffer/src/anchor.rs b/crates/multi_buffer/src/anchor.rs index 1305328d384023517dbb80d25e210b44e632eed8..6bed0a4028c5c4b0816355a397046560fb6b8618 100644 --- a/crates/multi_buffer/src/anchor.rs +++ b/crates/multi_buffer/src/anchor.rs @@ -76,27 +76,26 @@ impl Anchor { if text_cmp.is_ne() { return text_cmp; } - if self.diff_base_anchor.is_some() || other.diff_base_anchor.is_some() { - if let Some(base_text) = snapshot + if (self.diff_base_anchor.is_some() || other.diff_base_anchor.is_some()) + && let Some(base_text) = snapshot .diffs .get(&excerpt.buffer_id) .map(|diff| diff.base_text()) - { - let self_anchor = self.diff_base_anchor.filter(|a| base_text.can_resolve(a)); - let other_anchor = other.diff_base_anchor.filter(|a| base_text.can_resolve(a)); - return match (self_anchor, other_anchor) { - (Some(a), Some(b)) => a.cmp(&b, base_text), - (Some(_), None) => match other.text_anchor.bias { - Bias::Left => Ordering::Greater, - Bias::Right => Ordering::Less, - }, - (None, 
Some(_)) => match self.text_anchor.bias { - Bias::Left => Ordering::Less, - Bias::Right => Ordering::Greater, - }, - (None, None) => Ordering::Equal, - }; - } + { + let self_anchor = self.diff_base_anchor.filter(|a| base_text.can_resolve(a)); + let other_anchor = other.diff_base_anchor.filter(|a| base_text.can_resolve(a)); + return match (self_anchor, other_anchor) { + (Some(a), Some(b)) => a.cmp(&b, base_text), + (Some(_), None) => match other.text_anchor.bias { + Bias::Left => Ordering::Greater, + Bias::Right => Ordering::Less, + }, + (None, Some(_)) => match self.text_anchor.bias { + Bias::Left => Ordering::Less, + Bias::Right => Ordering::Greater, + }, + (None, None) => Ordering::Equal, + }; } } Ordering::Equal @@ -107,51 +106,49 @@ impl Anchor { } pub fn bias_left(&self, snapshot: &MultiBufferSnapshot) -> Anchor { - if self.text_anchor.bias != Bias::Left { - if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) { - return Self { - buffer_id: self.buffer_id, - excerpt_id: self.excerpt_id, - text_anchor: self.text_anchor.bias_left(&excerpt.buffer), - diff_base_anchor: self.diff_base_anchor.map(|a| { - if let Some(base_text) = snapshot - .diffs - .get(&excerpt.buffer_id) - .map(|diff| diff.base_text()) - { - if a.buffer_id == Some(base_text.remote_id()) { - return a.bias_left(base_text); - } - } - a - }), - }; - } + if self.text_anchor.bias != Bias::Left + && let Some(excerpt) = snapshot.excerpt(self.excerpt_id) + { + return Self { + buffer_id: self.buffer_id, + excerpt_id: self.excerpt_id, + text_anchor: self.text_anchor.bias_left(&excerpt.buffer), + diff_base_anchor: self.diff_base_anchor.map(|a| { + if let Some(base_text) = snapshot + .diffs + .get(&excerpt.buffer_id) + .map(|diff| diff.base_text()) + && a.buffer_id == Some(base_text.remote_id()) + { + return a.bias_left(base_text); + } + a + }), + }; } *self } pub fn bias_right(&self, snapshot: &MultiBufferSnapshot) -> Anchor { - if self.text_anchor.bias != Bias::Right { - if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) { - return Self { - buffer_id: self.buffer_id, - excerpt_id: self.excerpt_id, - text_anchor: self.text_anchor.bias_right(&excerpt.buffer), - diff_base_anchor: self.diff_base_anchor.map(|a| { - if let Some(base_text) = snapshot - .diffs - .get(&excerpt.buffer_id) - .map(|diff| diff.base_text()) - { - if a.buffer_id == Some(base_text.remote_id()) { - return a.bias_right(&base_text); - } - } - a - }), - }; - } + if self.text_anchor.bias != Bias::Right + && let Some(excerpt) = snapshot.excerpt(self.excerpt_id) + { + return Self { + buffer_id: self.buffer_id, + excerpt_id: self.excerpt_id, + text_anchor: self.text_anchor.bias_right(&excerpt.buffer), + diff_base_anchor: self.diff_base_anchor.map(|a| { + if let Some(base_text) = snapshot + .diffs + .get(&excerpt.buffer_id) + .map(|diff| diff.base_text()) + && a.buffer_id == Some(base_text.remote_id()) + { + return a.bias_right(base_text); + } + a + }), + }; } *self } @@ -212,7 +209,7 @@ impl AnchorRangeExt for Range { } fn includes(&self, other: &Range, buffer: &MultiBufferSnapshot) -> bool { - self.start.cmp(&other.start, &buffer).is_le() && other.end.cmp(&self.end, &buffer).is_le() + self.start.cmp(&other.start, buffer).is_le() && other.end.cmp(&self.end, buffer).is_le() } fn overlaps(&self, other: &Range, buffer: &MultiBufferSnapshot) -> bool { diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index eb12e6929cbc4bf74f44a2cb6eb9970c825d0fe3..a54d38163da211883985d8eee798e29730da1938 100644 --- 
a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -1082,11 +1082,11 @@ impl MultiBuffer { let mut ranges: Vec> = Vec::new(); for edit in edits { - if let Some(last_range) = ranges.last_mut() { - if edit.range.start <= last_range.end { - last_range.end = last_range.end.max(edit.range.end); - continue; - } + if let Some(last_range) = ranges.last_mut() + && edit.range.start <= last_range.end + { + last_range.end = last_range.end.max(edit.range.end); + continue; } ranges.push(edit.range); } @@ -1146,13 +1146,13 @@ impl MultiBuffer { pub fn last_transaction_id(&self, cx: &App) -> Option { if let Some(buffer) = self.as_singleton() { - return buffer + buffer .read(cx) .peek_undo_stack() - .map(|history_entry| history_entry.transaction_id()); + .map(|history_entry| history_entry.transaction_id()) } else { let last_transaction = self.history.undo_stack.last()?; - return Some(last_transaction.id); + Some(last_transaction.id) } } @@ -1212,25 +1212,24 @@ impl MultiBuffer { for range in buffer.edited_ranges_for_transaction_id::(*buffer_transaction) { for excerpt_id in &buffer_state.excerpts { cursor.seek(excerpt_id, Bias::Left); - if let Some(excerpt) = cursor.item() { - if excerpt.locator == *excerpt_id { - let excerpt_buffer_start = - excerpt.range.context.start.summary::(buffer); - let excerpt_buffer_end = excerpt.range.context.end.summary::(buffer); - let excerpt_range = excerpt_buffer_start..excerpt_buffer_end; - if excerpt_range.contains(&range.start) - && excerpt_range.contains(&range.end) - { - let excerpt_start = D::from_text_summary(&cursor.start().text); + if let Some(excerpt) = cursor.item() + && excerpt.locator == *excerpt_id + { + let excerpt_buffer_start = excerpt.range.context.start.summary::(buffer); + let excerpt_buffer_end = excerpt.range.context.end.summary::(buffer); + let excerpt_range = excerpt_buffer_start..excerpt_buffer_end; + if excerpt_range.contains(&range.start) + && excerpt_range.contains(&range.end) + { + let excerpt_start = D::from_text_summary(&cursor.start().text); - let mut start = excerpt_start; - start.add_assign(&(range.start - excerpt_buffer_start)); - let mut end = excerpt_start; - end.add_assign(&(range.end - excerpt_buffer_start)); + let mut start = excerpt_start; + start.add_assign(&(range.start - excerpt_buffer_start)); + let mut end = excerpt_start; + end.add_assign(&(range.end - excerpt_buffer_start)); - ranges.push(start..end); - break; - } + ranges.push(start..end); + break; } } } @@ -1251,25 +1250,25 @@ impl MultiBuffer { buffer.update(cx, |buffer, _| { buffer.merge_transactions(transaction, destination) }); - } else if let Some(transaction) = self.history.forget(transaction) { - if let Some(destination) = self.history.transaction_mut(destination) { - for (buffer_id, buffer_transaction_id) in transaction.buffer_transactions { - if let Some(destination_buffer_transaction_id) = - destination.buffer_transactions.get(&buffer_id) - { - if let Some(state) = self.buffers.borrow().get(&buffer_id) { - state.buffer.update(cx, |buffer, _| { - buffer.merge_transactions( - buffer_transaction_id, - *destination_buffer_transaction_id, - ) - }); - } - } else { - destination - .buffer_transactions - .insert(buffer_id, buffer_transaction_id); + } else if let Some(transaction) = self.history.forget(transaction) + && let Some(destination) = self.history.transaction_mut(destination) + { + for (buffer_id, buffer_transaction_id) in transaction.buffer_transactions { + if let Some(destination_buffer_transaction_id) = + 
destination.buffer_transactions.get(&buffer_id) + { + if let Some(state) = self.buffers.borrow().get(&buffer_id) { + state.buffer.update(cx, |buffer, _| { + buffer.merge_transactions( + buffer_transaction_id, + *destination_buffer_transaction_id, + ) + }); } + } else { + destination + .buffer_transactions + .insert(buffer_id, buffer_transaction_id); } } } @@ -1562,11 +1561,11 @@ impl MultiBuffer { }); let mut merged_ranges: Vec> = Vec::new(); for range in expanded_ranges { - if let Some(last_range) = merged_ranges.last_mut() { - if last_range.context.end >= range.context.start { - last_range.context.end = range.context.end; - continue; - } + if let Some(last_range) = merged_ranges.last_mut() + && last_range.context.end >= range.context.start + { + last_range.context.end = range.context.end; + continue; } merged_ranges.push(range) } @@ -1686,7 +1685,7 @@ impl MultiBuffer { cx: &mut Context, ) -> (Vec>, bool) { let (excerpt_ids, added_a_new_excerpt) = - self.update_path_excerpts(path, buffer, &buffer_snapshot, new, cx); + self.update_path_excerpts(path, buffer, buffer_snapshot, new, cx); let mut result = Vec::new(); let mut ranges = ranges.into_iter(); @@ -1726,7 +1725,7 @@ impl MultiBuffer { merged_ranges.push(range.clone()); counts.push(1); } - return (merged_ranges, counts); + (merged_ranges, counts) } fn update_path_excerpts( @@ -1784,7 +1783,7 @@ impl MultiBuffer { } Some(( *existing_id, - excerpt.range.context.to_point(&buffer_snapshot), + excerpt.range.context.to_point(buffer_snapshot), )) } else { None @@ -1794,25 +1793,25 @@ impl MultiBuffer { }; if let Some((last_id, last)) = to_insert.last_mut() { - if let Some(new) = new { - if last.context.end >= new.context.start { - last.context.end = last.context.end.max(new.context.end); - excerpt_ids.push(*last_id); - new_iter.next(); - continue; - } + if let Some(new) = new + && last.context.end >= new.context.start + { + last.context.end = last.context.end.max(new.context.end); + excerpt_ids.push(*last_id); + new_iter.next(); + continue; } - if let Some((existing_id, existing_range)) = &existing { - if last.context.end >= existing_range.start { - last.context.end = last.context.end.max(existing_range.end); - to_remove.push(*existing_id); - self.snapshot - .borrow_mut() - .replaced_excerpts - .insert(*existing_id, *last_id); - existing_iter.next(); - continue; - } + if let Some((existing_id, existing_range)) = &existing + && last.context.end >= existing_range.start + { + last.context.end = last.context.end.max(existing_range.end); + to_remove.push(*existing_id); + self.snapshot + .borrow_mut() + .replaced_excerpts + .insert(*existing_id, *last_id); + existing_iter.next(); + continue; } } @@ -2105,10 +2104,10 @@ impl MultiBuffer { .flatten() { cursor.seek_forward(&Some(locator), Bias::Left); - if let Some(excerpt) = cursor.item() { - if excerpt.locator == *locator { - excerpts.push((excerpt.id, excerpt.range.clone())); - } + if let Some(excerpt) = cursor.item() + && excerpt.locator == *locator + { + excerpts.push((excerpt.id, excerpt.range.clone())); } } @@ -2132,22 +2131,21 @@ impl MultiBuffer { let mut result = Vec::new(); for locator in locators { excerpts.seek_forward(&Some(locator), Bias::Left); - if let Some(excerpt) = excerpts.item() { - if excerpt.locator == *locator { - let excerpt_start = excerpts.start().1.clone(); - let excerpt_end = - ExcerptDimension(excerpt_start.0 + excerpt.text_summary.lines); + if let Some(excerpt) = excerpts.item() + && excerpt.locator == *locator + { + let excerpt_start = excerpts.start().1.clone(); + 
let excerpt_end = ExcerptDimension(excerpt_start.0 + excerpt.text_summary.lines); - diff_transforms.seek_forward(&excerpt_start, Bias::Left); - let overshoot = excerpt_start.0 - diff_transforms.start().0.0; - let start = diff_transforms.start().1.0 + overshoot; + diff_transforms.seek_forward(&excerpt_start, Bias::Left); + let overshoot = excerpt_start.0 - diff_transforms.start().0.0; + let start = diff_transforms.start().1.0 + overshoot; - diff_transforms.seek_forward(&excerpt_end, Bias::Right); - let overshoot = excerpt_end.0 - diff_transforms.start().0.0; - let end = diff_transforms.start().1.0 + overshoot; + diff_transforms.seek_forward(&excerpt_end, Bias::Right); + let overshoot = excerpt_end.0 - diff_transforms.start().0.0; + let end = diff_transforms.start().1.0 + overshoot; - result.push(start..end) - } + result.push(start..end) } } result @@ -2198,6 +2196,15 @@ impl MultiBuffer { }) } + pub fn buffer_for_anchor(&self, anchor: Anchor, cx: &App) -> Option> { + if let Some(buffer_id) = anchor.buffer_id { + self.buffer(buffer_id) + } else { + let (_, buffer, _) = self.excerpt_containing(anchor, cx)?; + Some(buffer) + } + } + // If point is at the end of the buffer, the last excerpt is returned pub fn point_to_buffer_offset( &self, @@ -2316,12 +2323,12 @@ impl MultiBuffer { // Skip over any subsequent excerpts that are also removed. if let Some(&next_excerpt_id) = excerpt_ids.peek() { let next_locator = snapshot.excerpt_locator_for_id(next_excerpt_id); - if let Some(next_excerpt) = cursor.item() { - if next_excerpt.locator == *next_locator { - excerpt_ids.next(); - excerpt = next_excerpt; - continue 'remove_excerpts; - } + if let Some(next_excerpt) = cursor.item() + && next_excerpt.locator == *next_locator + { + excerpt_ids.next(); + excerpt = next_excerpt; + continue 'remove_excerpts; } } @@ -2429,7 +2436,7 @@ impl MultiBuffer { cx.emit(match event { language::BufferEvent::Edited => Event::Edited { singleton_buffer_edited: true, - edited_buffer: Some(buffer.clone()), + edited_buffer: Some(buffer), }, language::BufferEvent::DirtyChanged => Event::DirtyChanged, language::BufferEvent::Saved => Event::Saved, @@ -2484,7 +2491,7 @@ impl MultiBuffer { let base_text_changed = snapshot .diffs .get(&buffer_id) - .map_or(true, |old_diff| !new_diff.base_texts_eq(old_diff)); + .is_none_or(|old_diff| !new_diff.base_texts_eq(old_diff)); snapshot.diffs.insert(buffer_id, new_diff); @@ -2494,33 +2501,33 @@ impl MultiBuffer { .excerpts .cursor::, ExcerptOffset>>(&()); cursor.seek_forward(&Some(locator), Bias::Left); - if let Some(excerpt) = cursor.item() { - if excerpt.locator == *locator { - let excerpt_buffer_range = excerpt.range.context.to_offset(&excerpt.buffer); - if diff_change_range.end < excerpt_buffer_range.start - || diff_change_range.start > excerpt_buffer_range.end - { - continue; - } - let excerpt_start = cursor.start().1; - let excerpt_len = ExcerptOffset::new(excerpt.text_summary.len); - let diff_change_start_in_excerpt = ExcerptOffset::new( - diff_change_range - .start - .saturating_sub(excerpt_buffer_range.start), - ); - let diff_change_end_in_excerpt = ExcerptOffset::new( - diff_change_range - .end - .saturating_sub(excerpt_buffer_range.start), - ); - let edit_start = excerpt_start + diff_change_start_in_excerpt.min(excerpt_len); - let edit_end = excerpt_start + diff_change_end_in_excerpt.min(excerpt_len); - excerpt_edits.push(Edit { - old: edit_start..edit_end, - new: edit_start..edit_end, - }); + if let Some(excerpt) = cursor.item() + && excerpt.locator == *locator + { + let 
excerpt_buffer_range = excerpt.range.context.to_offset(&excerpt.buffer); + if diff_change_range.end < excerpt_buffer_range.start + || diff_change_range.start > excerpt_buffer_range.end + { + continue; } + let excerpt_start = cursor.start().1; + let excerpt_len = ExcerptOffset::new(excerpt.text_summary.len); + let diff_change_start_in_excerpt = ExcerptOffset::new( + diff_change_range + .start + .saturating_sub(excerpt_buffer_range.start), + ); + let diff_change_end_in_excerpt = ExcerptOffset::new( + diff_change_range + .end + .saturating_sub(excerpt_buffer_range.start), + ); + let edit_start = excerpt_start + diff_change_start_in_excerpt.min(excerpt_len); + let edit_end = excerpt_start + diff_change_end_in_excerpt.min(excerpt_len); + excerpt_edits.push(Edit { + old: edit_start..edit_end, + new: edit_start..edit_end, + }); } } @@ -2778,7 +2785,7 @@ impl MultiBuffer { if diff_hunk.excerpt_id.cmp(&end_excerpt_id, &snapshot).is_gt() { continue; } - if last_hunk_row.map_or(false, |row| row >= diff_hunk.row_range.start) { + if last_hunk_row.is_some_and(|row| row >= diff_hunk.row_range.start) { continue; } let start = Anchor::in_buffer( @@ -3042,7 +3049,7 @@ impl MultiBuffer { is_dirty |= buffer.is_dirty(); has_deleted_file |= buffer .file() - .map_or(false, |file| file.disk_state() == DiskState::Deleted); + .is_some_and(|file| file.disk_state() == DiskState::Deleted); has_conflict |= buffer.has_conflict(); } if edited { @@ -3056,7 +3063,7 @@ impl MultiBuffer { snapshot.has_conflict = has_conflict; for (id, diff) in self.diffs.iter() { - if snapshot.diffs.get(&id).is_none() { + if snapshot.diffs.get(id).is_none() { snapshot.diffs.insert(*id, diff.diff.read(cx).snapshot(cx)); } } @@ -3155,13 +3162,12 @@ impl MultiBuffer { at_transform_boundary = false; let transforms_before_edit = old_diff_transforms.slice(&edit.old.start, Bias::Left); self.append_diff_transforms(&mut new_diff_transforms, transforms_before_edit); - if let Some(transform) = old_diff_transforms.item() { - if old_diff_transforms.end().0 == edit.old.start - && old_diff_transforms.start().0 < edit.old.start - { - self.push_diff_transform(&mut new_diff_transforms, transform.clone()); - old_diff_transforms.next(); - } + if let Some(transform) = old_diff_transforms.item() + && old_diff_transforms.end().0 == edit.old.start + && old_diff_transforms.start().0 < edit.old.start + { + self.push_diff_transform(&mut new_diff_transforms, transform.clone()); + old_diff_transforms.next(); } } @@ -3177,7 +3183,7 @@ impl MultiBuffer { &mut new_diff_transforms, &mut end_of_current_insert, &mut old_expanded_hunks, - &snapshot, + snapshot, change_kind, ); @@ -3201,9 +3207,10 @@ impl MultiBuffer { // If this is the last edit that intersects the current diff transform, // then recreate the content up to the end of this transform, to prepare // for reusing additional slices of the old transforms. 
- if excerpt_edits.peek().map_or(true, |next_edit| { - next_edit.old.start >= old_diff_transforms.end().0 - }) { + if excerpt_edits + .peek() + .is_none_or(|next_edit| next_edit.old.start >= old_diff_transforms.end().0) + { let keep_next_old_transform = (old_diff_transforms.start().0 >= edit.old.end) && match old_diff_transforms.item() { Some(DiffTransform::BufferContent { @@ -3223,7 +3230,7 @@ impl MultiBuffer { old_expanded_hunks.clear(); self.push_buffer_content_transform( - &snapshot, + snapshot, &mut new_diff_transforms, excerpt_offset, end_of_current_insert, @@ -3431,18 +3438,17 @@ impl MultiBuffer { inserted_hunk_info, summary, }) = subtree.first() - { - if self.extend_last_buffer_content_transform( + && self.extend_last_buffer_content_transform( new_transforms, *inserted_hunk_info, *summary, - ) { - let mut cursor = subtree.cursor::<()>(&()); - cursor.next(); - cursor.next(); - new_transforms.append(cursor.suffix(), &()); - return; - } + ) + { + let mut cursor = subtree.cursor::<()>(&()); + cursor.next(); + cursor.next(); + new_transforms.append(cursor.suffix(), &()); + return; } new_transforms.append(subtree, &()); } @@ -3456,14 +3462,13 @@ impl MultiBuffer { inserted_hunk_info: inserted_hunk_anchor, summary, } = transform - { - if self.extend_last_buffer_content_transform( + && self.extend_last_buffer_content_transform( new_transforms, inserted_hunk_anchor, summary, - ) { - return; - } + ) + { + return; } new_transforms.push(transform, &()); } @@ -3518,11 +3523,10 @@ impl MultiBuffer { summary, inserted_hunk_info: inserted_hunk_anchor, } = last_transform + && *inserted_hunk_anchor == new_inserted_hunk_info { - if *inserted_hunk_anchor == new_inserted_hunk_info { - *summary += summary_to_add; - did_extend = true; - } + *summary += summary_to_add; + did_extend = true; } }, &(), @@ -3565,9 +3569,7 @@ impl MultiBuffer { let multi = cx.new(|_| Self::new(Capability::ReadWrite)); for (text, ranges) in excerpts { let buffer = cx.new(|cx| Buffer::local(text, cx)); - let excerpt_ranges = ranges - .into_iter() - .map(|range| ExcerptRange::new(range.clone())); + let excerpt_ranges = ranges.into_iter().map(ExcerptRange::new); multi.update(cx, |multi, cx| { multi.push_excerpts(buffer, excerpt_ranges, cx) }); @@ -3601,7 +3603,7 @@ impl MultiBuffer { let mut edits: Vec<(Range, Arc)> = Vec::new(); let mut last_end = None; for _ in 0..edit_count { - if last_end.map_or(false, |last_end| last_end >= snapshot.len()) { + if last_end.is_some_and(|last_end| last_end >= snapshot.len()) { break; } @@ -3916,8 +3918,8 @@ impl MultiBufferSnapshot { &self, range: Range, ) -> Vec<(&BufferSnapshot, Range, ExcerptId)> { - let start = range.start.to_offset(&self); - let end = range.end.to_offset(&self); + let start = range.start.to_offset(self); + let end = range.end.to_offset(self); let mut cursor = self.cursor::(); cursor.seek(&start); @@ -3955,8 +3957,8 @@ impl MultiBufferSnapshot { &self, range: Range, ) -> impl Iterator, ExcerptId, Option)> + '_ { - let start = range.start.to_offset(&self); - let end = range.end.to_offset(&self); + let start = range.start.to_offset(self); + let end = range.end.to_offset(self); let mut cursor = self.cursor::(); cursor.seek(&start); @@ -4037,10 +4039,10 @@ impl MultiBufferSnapshot { cursor.seek(&query_range.start); - if let Some(region) = cursor.region().filter(|region| !region.is_main_buffer) { - if region.range.start > D::zero(&()) { - cursor.prev() - } + if let Some(region) = cursor.region().filter(|region| !region.is_main_buffer) + && region.range.start > D::zero(&()) + { 
+ cursor.prev() } iter::from_fn(move || { @@ -4070,19 +4072,15 @@ impl MultiBufferSnapshot { buffer_start = cursor.main_buffer_position()?; }; let mut buffer_end = excerpt.range.context.end.summary::(&excerpt.buffer); - if let Some((end_excerpt_id, end_buffer_offset)) = range_end { - if excerpt.id == end_excerpt_id { - buffer_end = buffer_end.min(end_buffer_offset); - } - } - - if let Some(iterator) = - get_buffer_metadata(&excerpt.buffer, buffer_start..buffer_end) + if let Some((end_excerpt_id, end_buffer_offset)) = range_end + && excerpt.id == end_excerpt_id { - Some(&mut current_excerpt_metadata.insert((excerpt.id, iterator)).1) - } else { - None + buffer_end = buffer_end.min(end_buffer_offset); } + + get_buffer_metadata(&excerpt.buffer, buffer_start..buffer_end).map(|iterator| { + &mut current_excerpt_metadata.insert((excerpt.id, iterator)).1 + }) }; // Visit each metadata item. @@ -4144,10 +4142,10 @@ impl MultiBufferSnapshot { // When there are no more metadata items for this excerpt, move to the next excerpt. else { current_excerpt_metadata.take(); - if let Some((end_excerpt_id, _)) = range_end { - if excerpt.id == end_excerpt_id { - return None; - } + if let Some((end_excerpt_id, _)) = range_end + && excerpt.id == end_excerpt_id + { + return None; } cursor.next_excerpt(); } @@ -4186,7 +4184,7 @@ impl MultiBufferSnapshot { } let start = Anchor::in_buffer(excerpt.id, excerpt.buffer_id, hunk.buffer_range.start) - .to_point(&self); + .to_point(self); return Some(MultiBufferRow(start.row)); } } @@ -4204,7 +4202,7 @@ impl MultiBufferSnapshot { continue; }; let start = Anchor::in_buffer(excerpt.id, excerpt.buffer_id, hunk.buffer_range.start) - .to_point(&self); + .to_point(self); return Some(MultiBufferRow(start.row)); } } @@ -4455,7 +4453,7 @@ impl MultiBufferSnapshot { let mut buffer_position = region.buffer_range.start; buffer_position.add_assign(&overshoot); let clipped_buffer_position = - clip_buffer_position(®ion.buffer, buffer_position, bias); + clip_buffer_position(region.buffer, buffer_position, bias); let mut position = region.range.start; position.add_assign(&(clipped_buffer_position - region.buffer_range.start)); position @@ -4485,7 +4483,7 @@ impl MultiBufferSnapshot { let buffer_start_value = region.buffer_range.start.value.unwrap(); let mut buffer_key = buffer_start_key; buffer_key.add_assign(&(key - start_key)); - let buffer_value = convert_buffer_dimension(®ion.buffer, buffer_key); + let buffer_value = convert_buffer_dimension(region.buffer, buffer_key); let mut result = start_value; result.add_assign(&(buffer_value - buffer_start_value)); result @@ -4622,20 +4620,20 @@ impl MultiBufferSnapshot { pub fn indent_and_comment_for_line(&self, row: MultiBufferRow, cx: &App) -> String { let mut indent = self.indent_size_for_line(row).chars().collect::(); - if self.language_settings(cx).extend_comment_on_newline { - if let Some(language_scope) = self.language_scope_at(Point::new(row.0, 0)) { - let delimiters = language_scope.line_comment_prefixes(); - for delimiter in delimiters { - if *self - .chars_at(Point::new(row.0, indent.len() as u32)) - .take(delimiter.chars().count()) - .collect::() - .as_str() - == **delimiter - { - indent.push_str(&delimiter); - break; - } + if self.language_settings(cx).extend_comment_on_newline + && let Some(language_scope) = self.language_scope_at(Point::new(row.0, 0)) + { + let delimiters = language_scope.line_comment_prefixes(); + for delimiter in delimiters { + if *self + .chars_at(Point::new(row.0, indent.len() as u32)) + 
.take(delimiter.chars().count()) + .collect::() + .as_str() + == **delimiter + { + indent.push_str(delimiter); + break; } } } @@ -4655,7 +4653,7 @@ impl MultiBufferSnapshot { return true; } } - return true; + true } pub fn prev_non_blank_row(&self, mut row: MultiBufferRow) -> Option { @@ -4893,25 +4891,22 @@ impl MultiBufferSnapshot { base_text_byte_range, .. }) => { - if let Some(diff_base_anchor) = &anchor.diff_base_anchor { - if let Some(base_text) = + if let Some(diff_base_anchor) = &anchor.diff_base_anchor + && let Some(base_text) = self.diffs.get(buffer_id).map(|diff| diff.base_text()) + && base_text.can_resolve(diff_base_anchor) + { + let base_text_offset = diff_base_anchor.to_offset(base_text); + if base_text_offset >= base_text_byte_range.start + && base_text_offset <= base_text_byte_range.end { - if base_text.can_resolve(&diff_base_anchor) { - let base_text_offset = diff_base_anchor.to_offset(&base_text); - if base_text_offset >= base_text_byte_range.start - && base_text_offset <= base_text_byte_range.end - { - let position_in_hunk = base_text - .text_summary_for_range::( - base_text_byte_range.start..base_text_offset, - ); - position.add_assign(&position_in_hunk); - } else if at_transform_end { - diff_transforms.next(); - continue; - } - } + let position_in_hunk = base_text.text_summary_for_range::( + base_text_byte_range.start..base_text_offset, + ); + position.add_assign(&position_in_hunk); + } else if at_transform_end { + diff_transforms.next(); + continue; } } } @@ -4941,20 +4936,19 @@ impl MultiBufferSnapshot { } let mut position = cursor.start().1; - if let Some(excerpt) = cursor.item() { - if excerpt.id == anchor.excerpt_id { - let excerpt_buffer_start = excerpt - .buffer - .offset_for_anchor(&excerpt.range.context.start); - let excerpt_buffer_end = - excerpt.buffer.offset_for_anchor(&excerpt.range.context.end); - let buffer_position = cmp::min( - excerpt_buffer_end, - excerpt.buffer.offset_for_anchor(&anchor.text_anchor), - ); - if buffer_position > excerpt_buffer_start { - position.value += buffer_position - excerpt_buffer_start; - } + if let Some(excerpt) = cursor.item() + && excerpt.id == anchor.excerpt_id + { + let excerpt_buffer_start = excerpt + .buffer + .offset_for_anchor(&excerpt.range.context.start); + let excerpt_buffer_end = excerpt.buffer.offset_for_anchor(&excerpt.range.context.end); + let buffer_position = cmp::min( + excerpt_buffer_end, + excerpt.buffer.offset_for_anchor(&anchor.text_anchor), + ); + if buffer_position > excerpt_buffer_start { + position.value += buffer_position - excerpt_buffer_start; } } position @@ -4964,7 +4958,7 @@ impl MultiBufferSnapshot { while let Some(replacement) = self.replaced_excerpts.get(&excerpt_id) { excerpt_id = *replacement; } - return excerpt_id; + excerpt_id } pub fn summaries_for_anchors<'a, D, I>(&'a self, anchors: I) -> Vec @@ -5082,9 +5076,9 @@ impl MultiBufferSnapshot { if point == region.range.end.key && region.has_trailing_newline { position.add_assign(&D::from_text_summary(&TextSummary::newline())); } - return Some(position); + Some(position) } else { - return Some(D::from_text_summary(&self.text_summary())); + Some(D::from_text_summary(&self.text_summary())) } }) } @@ -5124,7 +5118,7 @@ impl MultiBufferSnapshot { // Leave min and max anchors unchanged if invalid or // if the old excerpt still exists at this location let mut kept_position = next_excerpt - .map_or(false, |e| e.id == old_excerpt_id && e.contains(&anchor)) + .is_some_and(|e| e.id == old_excerpt_id && e.contains(&anchor)) || old_excerpt_id == 
ExcerptId::max() || old_excerpt_id == ExcerptId::min(); @@ -5211,15 +5205,12 @@ impl MultiBufferSnapshot { .cursor::>(&()); diff_transforms.seek(&offset, Bias::Right); - if offset == diff_transforms.start().0 && bias == Bias::Left { - if let Some(prev_item) = diff_transforms.prev_item() { - match prev_item { - DiffTransform::DeletedHunk { .. } => { - diff_transforms.prev(); - } - _ => {} - } - } + if offset == diff_transforms.start().0 + && bias == Bias::Left + && let Some(prev_item) = diff_transforms.prev_item() + && let DiffTransform::DeletedHunk { .. } = prev_item + { + diff_transforms.prev(); } let offset_in_transform = offset - diff_transforms.start().0; let mut excerpt_offset = diff_transforms.start().1; @@ -5246,15 +5237,6 @@ impl MultiBufferSnapshot { excerpt_offset += ExcerptOffset::new(offset_in_transform); }; - if let Some((excerpt_id, buffer_id, buffer)) = self.as_singleton() { - return Anchor { - buffer_id: Some(buffer_id), - excerpt_id: *excerpt_id, - text_anchor: buffer.anchor_at(excerpt_offset.value, bias), - diff_base_anchor, - }; - } - let mut excerpts = self .excerpts .cursor::>>(&()); @@ -5278,10 +5260,17 @@ impl MultiBufferSnapshot { text_anchor, diff_base_anchor, } - } else if excerpt_offset.is_zero() && bias == Bias::Left { - Anchor::min() } else { - Anchor::max() + let mut anchor = if excerpt_offset.is_zero() && bias == Bias::Left { + Anchor::min() + } else { + Anchor::max() + }; + // TODO this is a hack, remove it + if let Some((excerpt_id, _, _)) = self.as_singleton() { + anchor.excerpt_id = *excerpt_id; + } + anchor } } @@ -5296,17 +5285,17 @@ impl MultiBufferSnapshot { let locator = self.excerpt_locator_for_id(excerpt_id); let mut cursor = self.excerpts.cursor::>(&()); cursor.seek(locator, Bias::Left); - if let Some(excerpt) = cursor.item() { - if excerpt.id == excerpt_id { - let text_anchor = excerpt.clip_anchor(text_anchor); - drop(cursor); - return Some(Anchor { - buffer_id: Some(excerpt.buffer_id), - excerpt_id, - text_anchor, - diff_base_anchor: None, - }); - } + if let Some(excerpt) = cursor.item() + && excerpt.id == excerpt_id + { + let text_anchor = excerpt.clip_anchor(text_anchor); + drop(cursor); + return Some(Anchor { + buffer_id: Some(excerpt.buffer_id), + excerpt_id, + text_anchor, + diff_base_anchor: None, + }); } None } @@ -5491,7 +5480,7 @@ impl MultiBufferSnapshot { let range_filter = |open: Range, close: Range| -> bool { excerpt_buffer_range.contains(&open.start) && excerpt_buffer_range.contains(&close.end) - && range_filter.map_or(true, |filter| filter(buffer, open, close)) + && range_filter.is_none_or(|filter| filter(buffer, open, close)) }; let (open, close) = excerpt.buffer().innermost_enclosing_bracket_ranges( @@ -5651,10 +5640,10 @@ impl MultiBufferSnapshot { .buffer .line_indents_in_row_range(buffer_start_row..buffer_end_row); cursor.next(); - return Some(line_indents.map(move |(buffer_row, indent)| { + Some(line_indents.map(move |(buffer_row, indent)| { let row = region.range.start.row + (buffer_row - region.buffer_range.start.row); (MultiBufferRow(row), indent, ®ion.excerpt.buffer) - })); + })) }) .flatten() } @@ -5691,10 +5680,10 @@ impl MultiBufferSnapshot { .buffer .reversed_line_indents_in_row_range(buffer_start_row..buffer_end_row); cursor.prev(); - return Some(line_indents.map(move |(buffer_row, indent)| { + Some(line_indents.map(move |(buffer_row, indent)| { let row = region.range.start.row + (buffer_row - region.buffer_range.start.row); (MultiBufferRow(row), indent, ®ion.excerpt.buffer) - })); + })) }) .flatten() } @@ -5860,10 
+5849,10 @@ impl MultiBufferSnapshot { let current_depth = indent_stack.len() as u32; // Avoid retrieving the language settings repeatedly for every buffer row. - if let Some((prev_buffer_id, _)) = &prev_settings { - if prev_buffer_id != &buffer.remote_id() { - prev_settings.take(); - } + if let Some((prev_buffer_id, _)) = &prev_settings + && prev_buffer_id != &buffer.remote_id() + { + prev_settings.take(); } let settings = &prev_settings .get_or_insert_with(|| { @@ -6192,10 +6181,10 @@ impl MultiBufferSnapshot { } else { let mut cursor = self.excerpt_ids.cursor::(&()); cursor.seek(&id, Bias::Left); - if let Some(entry) = cursor.item() { - if entry.id == id { - return &entry.locator; - } + if let Some(entry) = cursor.item() + && entry.id == id + { + return &entry.locator; } panic!("invalid excerpt id {id:?}") } @@ -6272,10 +6261,10 @@ impl MultiBufferSnapshot { pub fn buffer_range_for_excerpt(&self, excerpt_id: ExcerptId) -> Option> { let mut cursor = self.excerpts.cursor::>(&()); let locator = self.excerpt_locator_for_id(excerpt_id); - if cursor.seek(&Some(locator), Bias::Left) { - if let Some(excerpt) = cursor.item() { - return Some(excerpt.range.context.clone()); - } + if cursor.seek(&Some(locator), Bias::Left) + && let Some(excerpt) = cursor.item() + { + return Some(excerpt.range.context.clone()); } None } @@ -6284,10 +6273,10 @@ impl MultiBufferSnapshot { let mut cursor = self.excerpts.cursor::>(&()); let locator = self.excerpt_locator_for_id(excerpt_id); cursor.seek(&Some(locator), Bias::Left); - if let Some(excerpt) = cursor.item() { - if excerpt.id == excerpt_id { - return Some(excerpt); - } + if let Some(excerpt) = cursor.item() + && excerpt.id == excerpt_id + { + return Some(excerpt); } None } @@ -6323,6 +6312,14 @@ impl MultiBufferSnapshot { }) } + pub fn buffer_id_for_anchor(&self, anchor: Anchor) -> Option { + if let Some(id) = anchor.buffer_id { + return Some(id); + } + let excerpt = self.excerpt_containing(anchor..anchor)?; + Some(excerpt.buffer_id()) + } + pub fn selections_in_range<'a>( &'a self, range: &'a Range, @@ -6418,7 +6415,7 @@ impl MultiBufferSnapshot { for (ix, entry) in excerpt_ids.iter().enumerate() { if ix == 0 { - if entry.id.cmp(&ExcerptId::min(), &self).is_le() { + if entry.id.cmp(&ExcerptId::min(), self).is_le() { panic!("invalid first excerpt id {:?}", entry.id); } } else if entry.id <= excerpt_ids[ix - 1].id { @@ -6446,13 +6443,12 @@ impl MultiBufferSnapshot { inserted_hunk_info: prev_inserted_hunk_info, .. }) = prev_transform + && *inserted_hunk_info == *prev_inserted_hunk_info { - if *inserted_hunk_info == *prev_inserted_hunk_info { - panic!( - "multiple adjacent buffer content transforms with is_inserted_hunk = {inserted_hunk_info:?}. transforms: {:+?}", - self.diff_transforms.items(&()) - ); - } + panic!( + "multiple adjacent buffer content transforms with is_inserted_hunk = {inserted_hunk_info:?}. transforms: {:+?}", + self.diff_transforms.items(&()) + ); } if summary.len == 0 && !self.is_empty() { panic!("empty buffer content transform"); @@ -6552,14 +6548,12 @@ where self.excerpts.next(); } else if let Some(DiffTransform::DeletedHunk { hunk_info, .. 
}) = self.diff_transforms.item() - { - if self + && self .excerpts .item() - .map_or(false, |excerpt| excerpt.id != hunk_info.excerpt_id) - { - self.excerpts.next(); - } + .is_some_and(|excerpt| excerpt.id != hunk_info.excerpt_id) + { + self.excerpts.next(); } } } @@ -6604,7 +6598,7 @@ where let prev_transform = self.diff_transforms.item(); self.diff_transforms.next(); - prev_transform.map_or(true, |next_transform| { + prev_transform.is_none_or(|next_transform| { matches!(next_transform, DiffTransform::BufferContent { .. }) }) } @@ -6619,12 +6613,12 @@ where } let next_transform = self.diff_transforms.next_item(); - next_transform.map_or(true, |next_transform| match next_transform { + next_transform.is_none_or(|next_transform| match next_transform { DiffTransform::BufferContent { .. } => true, DiffTransform::DeletedHunk { hunk_info, .. } => self .excerpts .item() - .map_or(false, |excerpt| excerpt.id != hunk_info.excerpt_id), + .is_some_and(|excerpt| excerpt.id != hunk_info.excerpt_id), }) } @@ -6648,7 +6642,7 @@ where hunk_info, .. } => { - let diff = self.diffs.get(&buffer_id)?; + let diff = self.diffs.get(buffer_id)?; let buffer = diff.base_text(); let mut rope_cursor = buffer.as_rope().cursor(0); let buffer_start = rope_cursor.summary::(base_text_byte_range.start); @@ -6657,7 +6651,7 @@ where buffer_end.add_assign(&buffer_range_len); let start = self.diff_transforms.start().output_dimension.0; let end = self.diff_transforms.end().output_dimension.0; - return Some(MultiBufferRegion { + Some(MultiBufferRegion { buffer, excerpt, has_trailing_newline: *has_trailing_newline, @@ -6667,7 +6661,7 @@ where )), buffer_range: buffer_start..buffer_end, range: start..end, - }); + }) } DiffTransform::BufferContent { inserted_hunk_info, .. @@ -7004,7 +6998,7 @@ impl Excerpt { } fn contains(&self, anchor: &Anchor) -> bool { - Some(self.buffer_id) == anchor.buffer_id + (anchor.buffer_id == None || anchor.buffer_id == Some(self.buffer_id)) && self .range .context @@ -7164,7 +7158,7 @@ impl ExcerptId { Self(usize::MAX) } - pub fn to_proto(&self) -> u64 { + pub fn to_proto(self) -> u64 { self.0 as _ } @@ -7505,61 +7499,59 @@ impl Iterator for MultiBufferRows<'_> { self.cursor.next(); if let Some(next_region) = self.cursor.region() { region = next_region; - } else { - if self.point == self.cursor.diff_transforms.end().output_dimension.0 { - let multibuffer_row = MultiBufferRow(self.point.row); - let last_excerpt = self - .cursor - .excerpts - .item() - .or(self.cursor.excerpts.prev_item())?; - let last_row = last_excerpt - .range - .context - .end - .to_point(&last_excerpt.buffer) - .row; + } else if self.point == self.cursor.diff_transforms.end().output_dimension.0 { + let multibuffer_row = MultiBufferRow(self.point.row); + let last_excerpt = self + .cursor + .excerpts + .item() + .or(self.cursor.excerpts.prev_item())?; + let last_row = last_excerpt + .range + .context + .end + .to_point(&last_excerpt.buffer) + .row; - let first_row = last_excerpt - .range - .context - .start - .to_point(&last_excerpt.buffer) - .row; + let first_row = last_excerpt + .range + .context + .start + .to_point(&last_excerpt.buffer) + .row; - let expand_info = if self.is_singleton { - None - } else { - let needs_expand_up = first_row == last_row - && last_row > 0 - && !region.diff_hunk_status.is_some_and(|d| d.is_deleted()); - let needs_expand_down = last_row < last_excerpt.buffer.max_point().row; - - if needs_expand_up && needs_expand_down { - Some(ExpandExcerptDirection::UpAndDown) - } else if needs_expand_up { - 
Some(ExpandExcerptDirection::Up) - } else if needs_expand_down { - Some(ExpandExcerptDirection::Down) - } else { - None - } - .map(|direction| ExpandInfo { - direction, - excerpt_id: last_excerpt.id, - }) - }; - self.point += Point::new(1, 0); - return Some(RowInfo { - buffer_id: Some(last_excerpt.buffer_id), - buffer_row: Some(last_row), - multibuffer_row: Some(multibuffer_row), - diff_status: None, - expand_info, - }); + let expand_info = if self.is_singleton { + None } else { - return None; - } + let needs_expand_up = first_row == last_row + && last_row > 0 + && !region.diff_hunk_status.is_some_and(|d| d.is_deleted()); + let needs_expand_down = last_row < last_excerpt.buffer.max_point().row; + + if needs_expand_up && needs_expand_down { + Some(ExpandExcerptDirection::UpAndDown) + } else if needs_expand_up { + Some(ExpandExcerptDirection::Up) + } else if needs_expand_down { + Some(ExpandExcerptDirection::Down) + } else { + None + } + .map(|direction| ExpandInfo { + direction, + excerpt_id: last_excerpt.id, + }) + }; + self.point += Point::new(1, 0); + return Some(RowInfo { + buffer_id: Some(last_excerpt.buffer_id), + buffer_row: Some(last_row), + multibuffer_row: Some(multibuffer_row), + diff_status: None, + expand_info, + }); + } else { + return None; }; } @@ -7767,7 +7759,7 @@ impl<'a> Iterator for MultiBufferChunks<'a> { } chunks } else { - let base_buffer = &self.diffs.get(&buffer_id)?.base_text(); + let base_buffer = &self.diffs.get(buffer_id)?.base_text(); base_buffer.chunks(base_text_start..base_text_end, self.language_aware) }; @@ -7855,10 +7847,11 @@ impl io::Read for ReversedMultiBufferBytes<'_> { if len > 0 { self.range.end -= len; self.chunk = &self.chunk[..self.chunk.len() - len]; - if !self.range.is_empty() && self.chunk.is_empty() { - if let Some(chunk) = self.chunks.next() { - self.chunk = chunk.as_bytes(); - } + if !self.range.is_empty() + && self.chunk.is_empty() + && let Some(chunk) = self.chunks.next() + { + self.chunk = chunk.as_bytes(); } } Ok(len) diff --git a/crates/multi_buffer/src/multi_buffer_tests.rs b/crates/multi_buffer/src/multi_buffer_tests.rs index 824efa559f6d52bf654d8f6c6ff9655eaf4a0e52..61b4b0520f23ed50b3b36374710b52c78c37080f 100644 --- a/crates/multi_buffer/src/multi_buffer_tests.rs +++ b/crates/multi_buffer/src/multi_buffer_tests.rs @@ -473,7 +473,7 @@ fn test_editing_text_in_diff_hunks(cx: &mut TestAppContext) { let base_text = "one\ntwo\nfour\nfive\nsix\nseven\n"; let text = "one\ntwo\nTHREE\nfour\nfive\nseven\n"; let buffer = cx.new(|cx| Buffer::local(text, cx)); - let diff = cx.new(|cx| BufferDiff::new_with_base_text(&base_text, &buffer, cx)); + let diff = cx.new(|cx| BufferDiff::new_with_base_text(base_text, &buffer, cx)); let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx)); let (mut snapshot, mut subscription) = multibuffer.update(cx, |multibuffer, cx| { @@ -2250,11 +2250,11 @@ impl ReferenceMultibuffer { let base_buffer = diff.base_text(); let mut offset = buffer_range.start; - let mut hunks = diff + let hunks = diff .hunks_intersecting_range(excerpt.range.clone(), buffer, cx) .peekable(); - while let Some(hunk) = hunks.next() { + for hunk in hunks { // Ignore hunks that are outside the excerpt range. 
let mut hunk_range = hunk.buffer_range.to_offset(buffer); @@ -2265,14 +2265,14 @@ impl ReferenceMultibuffer { } if !excerpt.expanded_diff_hunks.iter().any(|expanded_anchor| { - expanded_anchor.to_offset(&buffer).max(buffer_range.start) + expanded_anchor.to_offset(buffer).max(buffer_range.start) == hunk_range.start.max(buffer_range.start) }) { log::trace!("skipping a hunk that's not marked as expanded"); continue; } - if !hunk.buffer_range.start.is_valid(&buffer) { + if !hunk.buffer_range.start.is_valid(buffer) { log::trace!("skipping hunk with deleted start: {:?}", hunk.range); continue; } @@ -2449,7 +2449,7 @@ impl ReferenceMultibuffer { return false; } while let Some(hunk) = hunks.peek() { - match hunk.buffer_range.start.cmp(&hunk_anchor, &buffer) { + match hunk.buffer_range.start.cmp(hunk_anchor, &buffer) { cmp::Ordering::Less => { hunks.next(); } @@ -2519,8 +2519,8 @@ async fn test_random_set_ranges(cx: &mut TestAppContext, mut rng: StdRng) { let mut seen_ranges = Vec::default(); for (_, buf, range) in snapshot.excerpts() { - let start = range.context.start.to_point(&buf); - let end = range.context.end.to_point(&buf); + let start = range.context.start.to_point(buf); + let end = range.context.end.to_point(buf); seen_ranges.push(start..end); if let Some(last_end) = last_end.take() { @@ -2739,9 +2739,8 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { let id = buffer_handle.read(cx).remote_id(); if multibuffer.diff_for(id).is_none() { let base_text = base_texts.get(&id).unwrap(); - let diff = cx.new(|cx| { - BufferDiff::new_with_base_text(base_text, &buffer_handle, cx) - }); + let diff = cx + .new(|cx| BufferDiff::new_with_base_text(base_text, buffer_handle, cx)); reference.add_diff(diff.clone(), cx); multibuffer.add_diff(diff, cx) } @@ -3593,24 +3592,20 @@ fn assert_position_translation(snapshot: &MultiBufferSnapshot) { for (anchors, bias) in [(&left_anchors, Bias::Left), (&right_anchors, Bias::Right)] { for (ix, (offset, anchor)) in offsets.iter().zip(anchors).enumerate() { - if ix > 0 { - if *offset == 252 { - if offset > &offsets[ix - 1] { - let prev_anchor = left_anchors[ix - 1]; - assert!( - anchor.cmp(&prev_anchor, snapshot).is_gt(), - "anchor({}, {bias:?}).cmp(&anchor({}, {bias:?}).is_gt()", - offsets[ix], - offsets[ix - 1], - ); - assert!( - prev_anchor.cmp(&anchor, snapshot).is_lt(), - "anchor({}, {bias:?}).cmp(&anchor({}, {bias:?}).is_lt()", - offsets[ix - 1], - offsets[ix], - ); - } - } + if ix > 0 && *offset == 252 && offset > &offsets[ix - 1] { + let prev_anchor = left_anchors[ix - 1]; + assert!( + anchor.cmp(&prev_anchor, snapshot).is_gt(), + "anchor({}, {bias:?}).cmp(&anchor({}, {bias:?}).is_gt()", + offsets[ix], + offsets[ix - 1], + ); + assert!( + prev_anchor.cmp(anchor, snapshot).is_lt(), + "anchor({}, {bias:?}).cmp(&anchor({}, {bias:?}).is_lt()", + offsets[ix - 1], + offsets[ix], + ); } } } diff --git a/crates/multi_buffer/src/position.rs b/crates/multi_buffer/src/position.rs index 06508750597b97d7275b964114bcdad0d0e34c79..8a3ce78d0d9f7a6880dbc3202c002507c800b7b0 100644 --- a/crates/multi_buffer/src/position.rs +++ b/crates/multi_buffer/src/position.rs @@ -126,17 +126,17 @@ impl Default for TypedRow { impl PartialOrd for TypedOffset { fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(&other)) + Some(self.cmp(other)) } } impl PartialOrd for TypedPoint { fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(&other)) + Some(self.cmp(other)) } } impl PartialOrd for TypedRow { fn partial_cmp(&self, other: &Self) -> 
Option { - Some(self.cmp(&other)) + Some(self.cmp(other)) } } diff --git a/crates/node_runtime/src/node_runtime.rs b/crates/node_runtime/src/node_runtime.rs index 08698a1d6c1ac335b34e5a344b0252110e9b63f5..9d41eb1562943683aae3e785b1daac8bc3bfeb1a 100644 --- a/crates/node_runtime/src/node_runtime.rs +++ b/crates/node_runtime/src/node_runtime.rs @@ -29,6 +29,13 @@ pub struct NodeBinaryOptions { pub use_paths: Option<(PathBuf, PathBuf)>, } +pub enum VersionStrategy<'a> { + /// Install if current version doesn't match pinned version + Pin(&'a str), + /// Install if current version is older than latest version + Latest(&'a str), +} + #[derive(Clone)] pub struct NodeRuntime(Arc>); @@ -69,9 +76,8 @@ impl NodeRuntime { let mut state = self.0.lock().await; let options = loop { - match state.options.borrow().as_ref() { - Some(options) => break options.clone(), - None => {} + if let Some(options) = state.options.borrow().as_ref() { + break options.clone(); } match state.options.changed().await { Ok(()) => {} @@ -190,7 +196,7 @@ impl NodeRuntime { state.instance = Some(instance.boxed_clone()); state.last_options = Some(options); - return instance; + instance } pub async fn binary_path(&self) -> Result { @@ -286,7 +292,7 @@ impl NodeRuntime { package_name: &str, local_executable_path: &Path, local_package_directory: &Path, - latest_version: &str, + version_strategy: VersionStrategy<'_>, ) -> bool { // In the case of the local system not having the package installed, // or in the instances where we fail to parse package.json data, @@ -307,11 +313,21 @@ impl NodeRuntime { let Some(installed_version) = Version::parse(&installed_version).log_err() else { return true; }; - let Some(latest_version) = Version::parse(latest_version).log_err() else { - return true; - }; - installed_version < latest_version + match version_strategy { + VersionStrategy::Pin(pinned_version) => { + let Some(pinned_version) = Version::parse(pinned_version).log_err() else { + return true; + }; + installed_version != pinned_version + } + VersionStrategy::Latest(latest_version) => { + let Some(latest_version) = Version::parse(latest_version).log_err() else { + return true; + }; + installed_version < latest_version + } + } } } diff --git a/crates/notifications/src/notification_store.rs b/crates/notifications/src/notification_store.rs index 29653748e4873a271f58f932ee71c820aa755b9a..af2601bd181089a4952529ab4f315aa148e25121 100644 --- a/crates/notifications/src/notification_store.rs +++ b/crates/notifications/src/notification_store.rs @@ -138,10 +138,10 @@ impl NotificationStore { pub fn notification_for_id(&self, id: u64) -> Option<&NotificationEntry> { let mut cursor = self.notifications.cursor::(&()); cursor.seek(&NotificationId(id), Bias::Left); - if let Some(item) = cursor.item() { - if item.id == id { - return Some(item); - } + if let Some(item) = cursor.item() + && item.id == id + { + return Some(item); } None } @@ -229,25 +229,24 @@ impl NotificationStore { mut cx: AsyncApp, ) -> Result<()> { this.update(&mut cx, |this, cx| { - if let Some(notification) = envelope.payload.notification { - if let Some(rpc::Notification::ChannelMessageMention { message_id, .. }) = + if let Some(notification) = envelope.payload.notification + && let Some(rpc::Notification::ChannelMessageMention { message_id, .. 
}) = Notification::from_proto(¬ification) - { - let fetch_message_task = this.channel_store.update(cx, |this, cx| { - this.fetch_channel_messages(vec![message_id], cx) - }); - - cx.spawn(async move |this, cx| { - let messages = fetch_message_task.await?; - this.update(cx, move |this, cx| { - for message in messages { - this.channel_messages.insert(message_id, message); - } - cx.notify(); - }) + { + let fetch_message_task = this.channel_store.update(cx, |this, cx| { + this.fetch_channel_messages(vec![message_id], cx) + }); + + cx.spawn(async move |this, cx| { + let messages = fetch_message_task.await?; + this.update(cx, move |this, cx| { + for message in messages { + this.channel_messages.insert(message_id, message); + } + cx.notify(); }) - .detach_and_log_err(cx) - } + }) + .detach_and_log_err(cx) } Ok(()) })? @@ -390,12 +389,12 @@ impl NotificationStore { }); } } - } else if let Some(new_notification) = &new_notification { - if is_new { - cx.emit(NotificationEvent::NewNotification { - entry: new_notification.clone(), - }); - } + } else if let Some(new_notification) = &new_notification + && is_new + { + cx.emit(NotificationEvent::NewNotification { + entry: new_notification.clone(), + }); } if let Some(notification) = new_notification { diff --git a/crates/notifications/src/status_toast.rs b/crates/notifications/src/status_toast.rs index ffd87e0b8b8c4b797d3ecbbe489b0678fc64a812..7affa93f5a496bd0e436c74e5ff32f8aa871d026 100644 --- a/crates/notifications/src/status_toast.rs +++ b/crates/notifications/src/status_toast.rs @@ -205,7 +205,7 @@ impl Component for StatusToast { let pr_example = StatusToast::new("`zed/new-notification-system` created!", cx, |this, _cx| { - this.icon(ToastIcon::new(IconName::GitBranchSmall).color(Color::Muted)) + this.icon(ToastIcon::new(IconName::GitBranchAlt).color(Color::Muted)) .action("Open Pull Request", |_, cx| { cx.open_url("https://github.com/") }) diff --git a/crates/onboarding/Cargo.toml b/crates/onboarding/Cargo.toml index 436c714cf311bfd46e0eb5b961a8b929a5f09b58..4157be31723fe66eb8f12ddf581c292e8320be78 100644 --- a/crates/onboarding/Cargo.toml +++ b/crates/onboarding/Cargo.toml @@ -18,14 +18,13 @@ default = [] ai_onboarding.workspace = true anyhow.workspace = true client.workspace = true -command_palette_hooks.workspace = true component.workspace = true db.workspace = true documented.workspace = true editor.workspace = true -feature_flags.workspace = true fs.workspace = true fuzzy.workspace = true +git.workspace = true gpui.workspace = true itertools.workspace = true language.workspace = true @@ -37,6 +36,7 @@ project.workspace = true schemars.workspace = true serde.workspace = true settings.workspace = true +telemetry.workspace = true theme.workspace = true ui.workspace = true util.workspace = true diff --git a/crates/onboarding/src/ai_setup_page.rs b/crates/onboarding/src/ai_setup_page.rs index 00f2d5fc8ba8ed904da4ecae5555792e9b54bf49..672bcf1cd992ee128f885825d60d805bbb011c40 100644 --- a/crates/onboarding/src/ai_setup_page.rs +++ b/crates/onboarding/src/ai_setup_page.rs @@ -1,7 +1,7 @@ use std::sync::Arc; use ai_onboarding::AiUpsellCard; -use client::{Client, UserStore}; +use client::{Client, UserStore, zed_urls}; use fs::Fs; use gpui::{ Action, AnyView, App, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, WeakEntity, @@ -19,7 +19,7 @@ use util::ResultExt; use workspace::{ModalView, Workspace}; use zed_actions::agent::OpenSettings; -const FEATURED_PROVIDERS: [&'static str; 4] = ["anthropic", "google", "openai", "ollama"]; +const 
FEATURED_PROVIDERS: [&str; 4] = ["anthropic", "google", "openai", "ollama"]; fn render_llm_provider_section( tab_index: &mut isize, @@ -42,10 +42,16 @@ fn render_llm_provider_section( } fn render_privacy_card(tab_index: &mut isize, disabled: bool, cx: &mut App) -> impl IntoElement { - let privacy_badge = || { - Badge::new("Privacy") - .icon(IconName::ShieldCheck) - .tooltip(move |_, cx| cx.new(|_| AiPrivacyTooltip::new()).into()) + let (title, description) = if disabled { + ( + "AI is disabled across Zed", + "Re-enable it any time in Settings.", + ) + } else { + ( + "Privacy is the default for Zed", + "Any use or storage of your data is with your explicit, single-use, opt-in consent.", + ) }; v_flex() @@ -60,62 +66,41 @@ fn render_privacy_card(tab_index: &mut isize, disabled: bool, cx: &mut App) -> i .bg(cx.theme().colors().surface_background.opacity(0.3)) .rounded_lg() .overflow_hidden() - .map(|this| { - if disabled { - this.child( + .child( + h_flex() + .gap_2() + .justify_between() + .child(Label::new(title)) + .child( h_flex() - .gap_2() - .justify_between() + .gap_1() .child( - h_flex() - .gap_1() - .child(Label::new("AI is disabled across Zed")) - .child( - Icon::new(IconName::Check) - .color(Color::Success) - .size(IconSize::XSmall), - ), + Badge::new("Privacy") + .icon(IconName::ShieldCheck) + .tooltip(move |_, cx| cx.new(|_| AiPrivacyTooltip::new()).into()), ) - .child(privacy_badge()), - ) - .child( - Label::new("Re-enable it any time in Settings.") - .size(LabelSize::Small) - .color(Color::Muted), - ) - } else { - this.child( - h_flex() - .gap_2() - .justify_between() - .child(Label::new("Privacy is the default for Zed")) .child( - h_flex().gap_1().child(privacy_badge()).child( - Button::new("learn_more", "Learn More") - .style(ButtonStyle::Outlined) - .label_size(LabelSize::Small) - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) - .icon_color(Color::Muted) - .on_click(|_, _, cx| { - cx.open_url("https://zed.dev/docs/ai/privacy-and-security"); - }) - .tab_index({ - *tab_index += 1; - *tab_index - 1 - }), - ), + Button::new("learn_more", "Learn More") + .style(ButtonStyle::Outlined) + .label_size(LabelSize::Small) + .icon(IconName::ArrowUpRight) + .icon_size(IconSize::XSmall) + .icon_color(Color::Muted) + .on_click(|_, _, cx| { + cx.open_url(&zed_urls::ai_privacy_and_security(cx)) + }) + .tab_index({ + *tab_index += 1; + *tab_index - 1 + }), ), - ) - .child( - Label::new( - "Any use or storage of your data is with your explicit, single-use, opt-in consent.", - ) - .size(LabelSize::Small) - .color(Color::Muted), - ) - } - }) + ), + ) + .child( + Label::new(description) + .size(LabelSize::Small) + .color(Color::Muted), + ) } fn render_llm_provider_card( @@ -203,6 +188,11 @@ fn render_llm_provider_card( workspace .update(cx, |workspace, cx| { workspace.toggle_modal(window, cx, |window, cx| { + telemetry::event!( + "Welcome AI Modal Opened", + provider = provider.name().0, + ); + let modal = AiConfigurationModal::new( provider.clone(), window, @@ -260,16 +250,25 @@ pub(crate) fn render_ai_setup_page( ToggleState::Selected }, |&toggle_state, _, cx| { + let enabled = match toggle_state { + ToggleState::Indeterminate => { + return; + } + ToggleState::Unselected => true, + ToggleState::Selected => false, + }; + + telemetry::event!( + "Welcome AI Enabled", + toggle = if enabled { "on" } else { "off" }, + ); + let fs = ::global(cx); update_settings_file::( fs, cx, move |ai_settings: &mut Option, _| { - *ai_settings = match toggle_state { - ToggleState::Indeterminate => None, 
- ToggleState::Unselected => Some(true), - ToggleState::Selected => Some(false), - }; + *ai_settings = Some(enabled); }, ); }, @@ -330,7 +329,11 @@ impl AiConfigurationModal { cx: &mut Context, ) -> Self { let focus_handle = cx.focus_handle(); - let configuration_view = selected_provider.configuration_view(window, cx); + let configuration_view = selected_provider.configuration_view( + language_model::ConfigurationViewTargetAgent::ZedAgent, + window, + cx, + ); Self { focus_handle, @@ -407,7 +410,7 @@ impl AiPrivacyTooltip { impl Render for AiPrivacyTooltip { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { - const DESCRIPTION: &'static str = "We believe in opt-in data sharing as the default for building AI products, rather than opt-out. We'll only use or store your data if you affirmatively send it to us. "; + const DESCRIPTION: &str = "We believe in opt-in data sharing as the default for building AI products, rather than opt-out. We'll only use or store your data if you affirmatively send it to us. "; tooltip_container(window, cx, move |this, _, _| { this.child( diff --git a/crates/welcome/src/base_keymap_picker.rs b/crates/onboarding/src/base_keymap_picker.rs similarity index 99% rename from crates/welcome/src/base_keymap_picker.rs rename to crates/onboarding/src/base_keymap_picker.rs index 92317ca7113aee06025d2490656e869c7b4a5b0a..0ac07d9a9d3b17921112d6accf6f4c9c9dd65ef6 100644 --- a/crates/welcome/src/base_keymap_picker.rs +++ b/crates/onboarding/src/base_keymap_picker.rs @@ -12,7 +12,7 @@ use util::ResultExt; use workspace::{ModalView, Workspace, ui::HighlightedLabel}; actions!( - welcome, + zed, [ /// Toggles the base keymap selector modal. ToggleBaseKeymapSelector diff --git a/crates/onboarding/src/basics_page.rs b/crates/onboarding/src/basics_page.rs index a19a21fddf309b71d275a28d5ce8dcabea0fadc6..441d2ca4b748e43c8c5db41a113c54e185b2f1de 100644 --- a/crates/onboarding/src/basics_page.rs +++ b/crates/onboarding/src/basics_page.rs @@ -16,6 +16,23 @@ use vim_mode_setting::VimModeSetting; use crate::theme_preview::{ThemePreviewStyle, ThemePreviewTile}; +const LIGHT_THEMES: [&str; 3] = ["One Light", "Ayu Light", "Gruvbox Light"]; +const DARK_THEMES: [&str; 3] = ["One Dark", "Ayu Dark", "Gruvbox Dark"]; +const FAMILY_NAMES: [SharedString; 3] = [ + SharedString::new_static("One"), + SharedString::new_static("Ayu"), + SharedString::new_static("Gruvbox"), +]; + +fn get_theme_family_themes(theme_name: &str) -> Option<(&'static str, &'static str)> { + for i in 0..LIGHT_THEMES.len() { + if LIGHT_THEMES[i] == theme_name || DARK_THEMES[i] == theme_name { + return Some((LIGHT_THEMES[i], DARK_THEMES[i])); + } + } + None +} + fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement { let theme_selection = ThemeSettings::get_global(cx).theme_selection.clone(); let system_appearance = theme::SystemAppearance::global(cx); @@ -58,7 +75,7 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement .tab_index(tab_index) .selected_index(theme_mode as usize) .style(ui::ToggleButtonGroupStyle::Outlined) - .button_width(rems_from_px(64.)), + .width(rems_from_px(3. 
* 64.)), ), ) .child( @@ -90,14 +107,6 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement }; let current_theme_name = theme_selection.theme(appearance); - const LIGHT_THEMES: [&'static str; 3] = ["One Light", "Ayu Light", "Gruvbox Light"]; - const DARK_THEMES: [&'static str; 3] = ["One Dark", "Ayu Dark", "Gruvbox Dark"]; - const FAMILY_NAMES: [SharedString; 3] = [ - SharedString::new_static("One"), - SharedString::new_static("Ayu"), - SharedString::new_static("Gruvbox"), - ]; - let theme_names = match appearance { Appearance::Light => LIGHT_THEMES, Appearance::Dark => DARK_THEMES, @@ -105,7 +114,7 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement let themes = theme_names.map(|theme| theme_registry.get(theme).unwrap()); - let theme_previews = [0, 1, 2].map(|index| { + [0, 1, 2].map(|index| { let theme = &themes[index]; let is_selected = theme.name == current_theme_name; let name = theme.name.clone(); @@ -117,7 +126,7 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement .gap_1() .child( h_flex() - .id(name.clone()) + .id(name) .relative() .w_full() .border_2() @@ -167,9 +176,7 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement .color(Color::Muted) .size(LabelSize::Small), ) - }); - - theme_previews + }) } fn write_mode_change(mode: ThemeMode, cx: &mut App) { @@ -184,14 +191,17 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement let theme = theme.into(); update_settings_file::(fs, cx, move |settings, cx| { if theme_mode == ThemeMode::System { + let (light_theme, dark_theme) = + get_theme_family_themes(&theme).unwrap_or((theme.as_ref(), theme.as_ref())); + settings.theme = Some(ThemeSelection::Dynamic { mode: ThemeMode::System, - light: ThemeName(theme.clone()), - dark: ThemeName(theme.clone()), + light: ThemeName(light_theme.into()), + dark: ThemeName(dark_theme.into()), }); } else { let appearance = *SystemAppearance::global(cx); - settings.set_theme(theme.clone(), appearance); + settings.set_theme(theme, appearance); } }); } @@ -305,8 +315,8 @@ fn render_base_keymap_section(tab_index: &mut isize, cx: &mut App) -> impl IntoE .when_some(base_keymap, |this, base_keymap| { this.selected_index(base_keymap) }) + .full_width() .tab_index(tab_index) - .button_width(rems_from_px(216.)) .size(ui::ToggleButtonGroupSize::Medium) .style(ui::ToggleButtonGroupStyle::Outlined), ); diff --git a/crates/onboarding/src/editing_page.rs b/crates/onboarding/src/editing_page.rs index 8b4293db0dba387c4e38d324f3b57edbc0173d4c..8fae695854b7771f8fa8e7c19826860838ef844f 100644 --- a/crates/onboarding/src/editing_page.rs +++ b/crates/onboarding/src/editing_page.rs @@ -35,6 +35,11 @@ fn write_show_mini_map(show: ShowMinimap, cx: &mut App) { EditorSettings::override_global(curr_settings, cx); update_settings_file::(fs, cx, move |editor_settings, _| { + telemetry::event!( + "Welcome Minimap Clicked", + from = editor_settings.minimap.unwrap_or_default(), + to = show + ); editor_settings.minimap.get_or_insert_default().show = Some(show); }); } @@ -71,7 +76,7 @@ fn read_git_blame(cx: &App) -> bool { ProjectSettings::get_global(cx).git.inline_blame_enabled() } -fn set_git_blame(enabled: bool, cx: &mut App) { +fn write_git_blame(enabled: bool, cx: &mut App) { let fs = ::global(cx); let mut curr_settings = ProjectSettings::get_global(cx).clone(); @@ -95,6 +100,12 @@ fn write_ui_font_family(font: SharedString, cx: &mut App) { let fs = ::global(cx); 
update_settings_file::(fs, cx, move |theme_settings, _| { + telemetry::event!( + "Welcome Font Changed", + type = "ui font", + old = theme_settings.ui_font_family, + new = font + ); theme_settings.ui_font_family = Some(FontFamilyName(font.into())); }); } @@ -119,6 +130,13 @@ fn write_buffer_font_family(font_family: SharedString, cx: &mut App) { let fs = ::global(cx); update_settings_file::(fs, cx, move |theme_settings, _| { + telemetry::event!( + "Welcome Font Changed", + type = "editor font", + old = theme_settings.buffer_font_family, + new = font_family + ); + theme_settings.buffer_font_family = Some(FontFamilyName(font_family.into())); }); } @@ -197,7 +215,7 @@ fn render_setting_import_button( .color(Color::Muted) .size(IconSize::XSmall), ) - .child(Label::new(label)), + .child(Label::new(label.clone())), ) .when(imported, |this| { this.child( @@ -212,7 +230,10 @@ fn render_setting_import_button( ) }), ) - .on_click(move |_, window, cx| window.dispatch_action(action.boxed_clone(), cx)), + .on_click(move |_, window, cx| { + telemetry::event!("Welcome Import Settings", import_source = label,); + window.dispatch_action(action.boxed_clone(), cx); + }), ) } @@ -293,7 +314,7 @@ fn render_font_customization_section( .child( PopoverMenu::new("ui-font-picker") .menu({ - let ui_font_picker = ui_font_picker.clone(); + let ui_font_picker = ui_font_picker; move |_window, _cx| Some(ui_font_picker.clone()) }) .trigger( @@ -357,7 +378,7 @@ fn render_font_customization_section( .child( PopoverMenu::new("buffer-font-picker") .menu({ - let buffer_font_picker = buffer_font_picker.clone(); + let buffer_font_picker = buffer_font_picker; move |_window, _cx| Some(buffer_font_picker.clone()) }) .trigger( @@ -573,7 +594,7 @@ fn font_picker( ) -> FontPicker { let delegate = FontPickerDelegate::new(current_font, on_font_changed, cx); - Picker::list(delegate, window, cx) + Picker::uniform_list(delegate, window, cx) .show_scrollbar(true) .width(rems_from_px(210.)) .max_height(Some(rems(20.).into())) @@ -584,7 +605,7 @@ fn render_popular_settings_section( window: &mut Window, cx: &mut App, ) -> impl IntoElement { - const LIGATURE_TOOLTIP: &'static str = + const LIGATURE_TOOLTIP: &str = "Font ligatures combine two characters into one. 
For example, turning =/= into ≠."; v_flex() @@ -605,7 +626,13 @@ fn render_popular_settings_section( ui::ToggleState::Unselected }, |toggle_state, _, cx| { - write_font_ligatures(toggle_state == &ToggleState::Selected, cx); + let enabled = toggle_state == &ToggleState::Selected; + telemetry::event!( + "Welcome Font Ligature", + options = if enabled { "on" } else { "off" }, + ); + + write_font_ligatures(enabled, cx); }, ) .tab_index({ @@ -625,7 +652,13 @@ fn render_popular_settings_section( ui::ToggleState::Unselected }, |toggle_state, _, cx| { - write_format_on_save(toggle_state == &ToggleState::Selected, cx); + let enabled = toggle_state == &ToggleState::Selected; + telemetry::event!( + "Welcome Format On Save Changed", + options = if enabled { "on" } else { "off" }, + ); + + write_format_on_save(enabled, cx); }, ) .tab_index({ @@ -644,7 +677,13 @@ fn render_popular_settings_section( ui::ToggleState::Unselected }, |toggle_state, _, cx| { - write_inlay_hints(toggle_state == &ToggleState::Selected, cx); + let enabled = toggle_state == &ToggleState::Selected; + telemetry::event!( + "Welcome Inlay Hints Changed", + options = if enabled { "on" } else { "off" }, + ); + + write_inlay_hints(enabled, cx); }, ) .tab_index({ @@ -655,7 +694,7 @@ fn render_popular_settings_section( .child( SwitchField::new( "onboarding-git-blame-switch", - "Git Blame", + "Inline Git Blame", Some("See who committed each line on a given file.".into()), if read_git_blame(cx) { ui::ToggleState::Selected @@ -663,7 +702,13 @@ fn render_popular_settings_section( ui::ToggleState::Unselected }, |toggle_state, _, cx| { - set_git_blame(toggle_state == &ToggleState::Selected, cx); + let enabled = toggle_state == &ToggleState::Selected; + telemetry::event!( + "Welcome Git Blame Changed", + options = if enabled { "on" } else { "off" }, + ); + + write_git_blame(enabled, cx); }, ) .tab_index({ @@ -676,7 +721,7 @@ fn render_popular_settings_section( .items_start() .justify_between() .child( - v_flex().child(Label::new("Mini Map")).child( + v_flex().child(Label::new("Minimap")).child( Label::new("See a high-level overview of your source code.") .color(Color::Muted), ), @@ -706,7 +751,7 @@ fn render_popular_settings_section( }) .tab_index(tab_index) .style(ToggleButtonGroupStyle::Outlined) - .button_width(ui::rems_from_px(64.)), + .width(ui::rems_from_px(3. 
* 64.)), ), ) } diff --git a/crates/welcome/src/multibuffer_hint.rs b/crates/onboarding/src/multibuffer_hint.rs similarity index 99% rename from crates/welcome/src/multibuffer_hint.rs rename to crates/onboarding/src/multibuffer_hint.rs index ea64cab9df3de0a7e0f1969afe86c64d34be678a..3a20cbb6bd4443356db3a9fc1402b1102558ea02 100644 --- a/crates/welcome/src/multibuffer_hint.rs +++ b/crates/onboarding/src/multibuffer_hint.rs @@ -159,7 +159,7 @@ impl Render for MultibufferHint { .child( Button::new("open_docs", "Learn More") .icon(IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) + .icon_size(IconSize::Small) .icon_color(Color::Muted) .icon_position(IconPosition::End) .on_click(move |_event, _, cx| { diff --git a/crates/onboarding/src/onboarding.rs b/crates/onboarding/src/onboarding.rs index 98f61df97b8e9476b974dd88ad31ff256c61cfa0..884374a72fe8b71bc55803b800c9429c19a96d5e 100644 --- a/crates/onboarding/src/onboarding.rs +++ b/crates/onboarding/src/onboarding.rs @@ -1,8 +1,7 @@ -use crate::welcome::{ShowWelcome, WelcomePage}; -use client::{Client, UserStore}; -use command_palette_hooks::CommandPaletteFilter; +pub use crate::welcome::ShowWelcome; +use crate::{multibuffer_hint::MultibufferHint, welcome::WelcomePage}; +use client::{Client, UserStore, zed_urls}; use db::kvp::KEY_VALUE_STORE; -use feature_flags::{FeatureFlag, FeatureFlagViewExt as _}; use fs::Fs; use gpui::{ Action, AnyElement, App, AppContext, AsyncWindowContext, Context, Entity, EventEmitter, @@ -27,17 +26,13 @@ use workspace::{ }; mod ai_setup_page; +mod base_keymap_picker; mod basics_page; mod editing_page; +pub mod multibuffer_hint; mod theme_preview; mod welcome; -pub struct OnBoardingFeatureFlag {} - -impl FeatureFlag for OnBoardingFeatureFlag { - const NAME: &'static str = "onboarding"; -} - /// Imports settings from Visual Studio Code. #[derive(Copy, Clone, Debug, Default, PartialEq, Deserialize, JsonSchema, Action)] #[action(namespace = zed)] @@ -57,6 +52,7 @@ pub struct ImportCursorSettings { } pub const FIRST_OPEN: &str = "first_open"; +pub const DOCS_URL: &str = "https://zed.dev/docs/"; actions!( zed, @@ -78,11 +74,21 @@ actions!( /// Finish the onboarding process. Finish, /// Sign in while in the onboarding flow. - SignIn + SignIn, + /// Open the user account in zed.dev while in the onboarding flow. + OpenAccount, + /// Resets the welcome screen hints to their initial state. 
+ ResetHints ] ); pub fn init(cx: &mut App) { + cx.observe_new(|workspace: &mut Workspace, _, _cx| { + workspace + .register_action(|_workspace, _: &ResetHints, _, cx| MultibufferHint::set_count(0, cx)); + }) + .detach(); + cx.on_action(|_: &OpenOnboarding, cx| { with_active_or_new_workspace(cx, |workspace, window, cx| { workspace @@ -180,38 +186,14 @@ pub fn init(cx: &mut App) { }) .detach(); - cx.observe_new::(|_, window, cx| { - let Some(window) = window else { - return; - }; - - let onboarding_actions = [ - std::any::TypeId::of::(), - std::any::TypeId::of::(), - ]; - - CommandPaletteFilter::update_global(cx, |filter, _cx| { - filter.hide_action_types(&onboarding_actions); - }); + base_keymap_picker::init(cx); - cx.observe_flag::(window, move |is_enabled, _, _, cx| { - if is_enabled { - CommandPaletteFilter::update_global(cx, |filter, _cx| { - filter.show_action_types(onboarding_actions.iter()); - }); - } else { - CommandPaletteFilter::update_global(cx, |filter, _cx| { - filter.hide_action_types(&onboarding_actions); - }); - } - }) - .detach(); - }) - .detach(); register_serializable_item::(cx); + register_serializable_item::(cx); } pub fn show_onboarding_view(app_state: Arc, cx: &mut App) -> Task> { + telemetry::event!("Onboarding Page Opened"); open_new( Default::default(), app_state, @@ -240,6 +222,16 @@ enum SelectedPage { AiSetup, } +impl SelectedPage { + fn name(&self) -> &'static str { + match self { + SelectedPage::Basics => "Basics", + SelectedPage::Editing => "Editing", + SelectedPage::AiSetup => "AI Setup", + } + } +} + struct Onboarding { workspace: WeakEntity, focus_handle: FocusHandle, @@ -259,7 +251,21 @@ impl Onboarding { }) } - fn set_page(&mut self, page: SelectedPage, cx: &mut Context) { + fn set_page( + &mut self, + page: SelectedPage, + clicked: Option<&'static str>, + cx: &mut Context, + ) { + if let Some(click) = clicked { + telemetry::event!( + "Welcome Tab Clicked", + from = self.selected_page.name(), + to = page.name(), + clicked = click, + ); + } + self.selected_page = page; cx.notify(); cx.emit(ItemEvent::UpdateTab); @@ -323,8 +329,13 @@ impl Onboarding { gpui::Empty.into_any_element(), IntoElement::into_any_element, )) - .on_click(cx.listener(move |this, _, _, cx| { - this.set_page(page, cx); + .on_click(cx.listener(move |this, click_event, _, cx| { + let click = match click_event { + gpui::ClickEvent::Mouse(_) => "mouse", + gpui::ClickEvent::Keyboard(_) => "keyboard", + }; + + this.set_page(page, Some(click), cx); })) }) } @@ -420,11 +431,40 @@ impl Onboarding { ) .child( if let Some(user) = self.user_store.read(cx).current_user() { - h_flex() - .pl_1p5() - .gap_2() - .child(Avatar::new(user.avatar_uri.clone())) - .child(Label::new(user.github_login.clone())) + v_flex() + .gap_1() + .child( + h_flex() + .ml_2() + .gap_2() + .max_w_full() + .w_full() + .child(Avatar::new(user.avatar_uri.clone())) + .child(Label::new(user.github_login.clone()).truncate()), + ) + .child( + ButtonLike::new("open_account") + .size(ButtonSize::Medium) + .child( + h_flex() + .ml_1() + .w_full() + .justify_between() + .child(Label::new("Open Account")) + .children( + KeyBinding::for_action_in( + &OpenAccount, + &self.focus_handle, + window, + cx, + ) + .map(|kb| kb.size(rems_from_px(12.))), + ), + ) + .on_click(|_, window, cx| { + window.dispatch_action(OpenAccount.boxed_clone(), cx); + }), + ) .into_any_element() } else { Button::new("sign_in", "Sign In") @@ -444,6 +484,7 @@ impl Onboarding { } fn on_finish(_: &Finish, _: &mut Window, cx: &mut App) { + telemetry::event!("Welcome 
Skip Clicked"); go_to_welcome_page(cx); } @@ -453,13 +494,17 @@ impl Onboarding { window .spawn(cx, async move |cx| { client - .sign_in_with_optional_connect(true, &cx) + .sign_in_with_optional_connect(true, cx) .await .notify_async_err(cx); }) .detach(); } + fn handle_open_account(_: &OpenAccount, _: &mut Window, cx: &mut App) { + cx.open_url(&zed_urls::account_url(cx)) + } + fn render_page(&mut self, window: &mut Window, cx: &mut Context) -> AnyElement { let client = Client::global(cx); @@ -495,14 +540,15 @@ impl Render for Onboarding { .bg(cx.theme().colors().editor_background) .on_action(Self::on_finish) .on_action(Self::handle_sign_in) + .on_action(Self::handle_open_account) .on_action(cx.listener(|this, _: &ActivateBasicsPage, _, cx| { - this.set_page(SelectedPage::Basics, cx); + this.set_page(SelectedPage::Basics, Some("action"), cx); })) .on_action(cx.listener(|this, _: &ActivateEditingPage, _, cx| { - this.set_page(SelectedPage::Editing, cx); + this.set_page(SelectedPage::Editing, Some("action"), cx); })) .on_action(cx.listener(|this, _: &ActivateAISetupPage, _, cx| { - this.set_page(SelectedPage::AiSetup, cx); + this.set_page(SelectedPage::AiSetup, Some("action"), cx); })) .on_action(cx.listener(|_, _: &menu::SelectNext, window, cx| { window.focus_next(); @@ -515,6 +561,7 @@ impl Render for Onboarding { .child( h_flex() .max_w(rems_from_px(1100.)) + .max_h(rems_from_px(850.)) .size_full() .m_auto() .py_20() @@ -524,12 +571,14 @@ impl Render for Onboarding { .child(self.render_nav(window, cx)) .child( v_flex() + .id("page-content") + .size_full() .max_w_full() .min_w_0() .pl_12() .border_l_1() .border_color(cx.theme().colors().border_variant.opacity(0.5)) - .size_full() + .overflow_y_scroll() .child(self.render_page(window, cx)), ), ) @@ -565,9 +614,13 @@ impl Item for Onboarding { _: &mut Window, cx: &mut Context, ) -> Option> { - self.workspace - .update(cx, |workspace, cx| Onboarding::new(workspace, cx)) - .ok() + Some(cx.new(|cx| Onboarding { + workspace: self.workspace.clone(), + user_store: self.user_store.clone(), + selected_page: self.selected_page, + focus_handle: cx.focus_handle(), + _settings_subscription: cx.observe_global::(move |_, cx| cx.notify()), + })) } fn to_item_events(event: &Self::Event, mut f: impl FnMut(workspace::item::ItemEvent)) { @@ -690,7 +743,7 @@ pub async fn handle_import_vscode_settings( "Failed to import settings. 
See log for details", cx, |this, _| { - this.icon(ToastIcon::new(IconName::X).color(Color::Error)) + this.icon(ToastIcon::new(IconName::Close).color(Color::Error)) .action("Open Log", |window, cx| { window.dispatch_action(workspace::OpenLog.boxed_clone(), cx) }) @@ -763,7 +816,7 @@ impl workspace::SerializableItem for Onboarding { if let Some(page) = page { zlog::info!("Onboarding page {page:?} loaded"); onboarding_page.update(cx, |onboarding_page, cx| { - onboarding_page.set_page(page, cx); + onboarding_page.set_page(page, None, cx); }) } onboarding_page diff --git a/crates/onboarding/src/theme_preview.rs b/crates/onboarding/src/theme_preview.rs index 81eb14ec4b0cb502618c8bd06d4cd92a63186c71..8bd65d8a2707acdc53333071486f41741398a82a 100644 --- a/crates/onboarding/src/theme_preview.rs +++ b/crates/onboarding/src/theme_preview.rs @@ -1,6 +1,9 @@ #![allow(unused, dead_code)] use gpui::{Hsla, Length}; -use std::sync::Arc; +use std::{ + cell::LazyCell, + sync::{Arc, LazyLock, OnceLock}, +}; use theme::{Theme, ThemeColors, ThemeRegistry}; use ui::{ IntoElement, RenderOnce, component_prelude::Documented, prelude::*, utils::inner_corner_radius, @@ -22,6 +25,15 @@ pub struct ThemePreviewTile { style: ThemePreviewStyle, } +static CHILD_RADIUS: LazyLock = LazyLock::new(|| { + inner_corner_radius( + ThemePreviewTile::ROOT_RADIUS, + ThemePreviewTile::ROOT_BORDER, + ThemePreviewTile::ROOT_PADDING, + ThemePreviewTile::CHILD_BORDER, + ) +}); + impl ThemePreviewTile { pub const SKELETON_HEIGHT_DEFAULT: Pixels = px(2.); pub const SIDEBAR_SKELETON_ITEM_COUNT: usize = 8; @@ -30,14 +42,6 @@ impl ThemePreviewTile { pub const ROOT_BORDER: Pixels = px(2.0); pub const ROOT_PADDING: Pixels = px(2.0); pub const CHILD_BORDER: Pixels = px(1.0); - pub const CHILD_RADIUS: std::cell::LazyCell = std::cell::LazyCell::new(|| { - inner_corner_radius( - Self::ROOT_RADIUS, - Self::ROOT_BORDER, - Self::ROOT_PADDING, - Self::CHILD_BORDER, - ) - }); pub fn new(theme: Arc, seed: f32) -> Self { Self { @@ -202,16 +206,16 @@ impl ThemePreviewTile { sidebar_width, skeleton_height.clone(), )) - .child(Self::render_pane(seed, theme, skeleton_height.clone())) + .child(Self::render_pane(seed, theme, skeleton_height)) } fn render_borderless(seed: f32, theme: Arc) -> impl IntoElement { - return Self::render_editor( + Self::render_editor( seed, theme, Self::SIDEBAR_WIDTH_DEFAULT, Self::SKELETON_HEIGHT_DEFAULT, - ); + ) } fn render_border(seed: f32, theme: Arc) -> impl IntoElement { @@ -222,7 +226,7 @@ impl ThemePreviewTile { .child( div() .size_full() - .rounded(*Self::CHILD_RADIUS) + .rounded(*CHILD_RADIUS) .border(Self::CHILD_BORDER) .border_color(theme.colors().border) .child(Self::render_editor( @@ -242,7 +246,7 @@ impl ThemePreviewTile { ) -> impl IntoElement { let sidebar_width = relative(0.20); - return div() + div() .size_full() .p(Self::ROOT_PADDING) .rounded(Self::ROOT_RADIUS) @@ -250,13 +254,13 @@ impl ThemePreviewTile { h_flex() .size_full() .relative() - .rounded(*Self::CHILD_RADIUS) + .rounded(*CHILD_RADIUS) .border(Self::CHILD_BORDER) .border_color(border_color) .overflow_hidden() .child(div().size_full().child(Self::render_editor( seed, - theme.clone(), + theme, sidebar_width, Self::SKELETON_HEIGHT_DEFAULT, ))) @@ -274,7 +278,7 @@ impl ThemePreviewTile { )), ), ) - .into_any_element(); + .into_any_element() } } @@ -325,9 +329,9 @@ impl Component for ThemePreviewTile { let themes_to_preview = vec![ one_dark.clone().ok(), - one_light.clone().ok(), - gruvbox_dark.clone().ok(), - gruvbox_light.clone().ok(), + one_light.ok(), + 
gruvbox_dark.ok(), + gruvbox_light.ok(), ] .into_iter() .flatten() @@ -344,7 +348,7 @@ impl Component for ThemePreviewTile { div() .w(px(240.)) .h(px(180.)) - .child(ThemePreviewTile::new(one_dark.clone(), 0.42)) + .child(ThemePreviewTile::new(one_dark, 0.42)) .into_any_element(), )])] } else { @@ -358,13 +362,12 @@ impl Component for ThemePreviewTile { .gap_4() .children( themes_to_preview - .iter() - .enumerate() - .map(|(_, theme)| { + .into_iter() + .map(|theme| { div() .w(px(200.)) .h(px(140.)) - .child(ThemePreviewTile::new(theme.clone(), 0.42)) + .child(ThemePreviewTile::new(theme, 0.42)) }) .collect::>(), ) diff --git a/crates/onboarding/src/welcome.rs b/crates/onboarding/src/welcome.rs index d4d6c3f701dea4f381160b8d76b43c1378685656..3fe9c32a48c4f6ee9cb3756e08b1eb9a836657dc 100644 --- a/crates/onboarding/src/welcome.rs +++ b/crates/onboarding/src/welcome.rs @@ -1,6 +1,6 @@ use gpui::{ Action, App, Context, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement, - NoAction, ParentElement, Render, Styled, Window, actions, + ParentElement, Render, Styled, Task, Window, actions, }; use menu::{SelectNext, SelectPrevious}; use ui::{ButtonLike, Divider, DividerColor, KeyBinding, Vector, VectorName, prelude::*}; @@ -37,9 +37,8 @@ const CONTENT: (Section<4>, Section<3>) = ( }, SectionEntry { icon: IconName::CloudDownload, - title: "Clone a Repo", - // TODO: use proper action - action: &NoAction, + title: "Clone Repository", + action: &git::Clone, }, SectionEntry { icon: IconName::ListCollapse, @@ -105,7 +104,7 @@ impl Section { self.entries .iter() .enumerate() - .map(|(index, entry)| entry.render(index_offset + index, &focus, window, cx)), + .map(|(index, entry)| entry.render(index_offset + index, focus, window, cx)), ) } } @@ -353,3 +352,109 @@ impl Item for WelcomePage { f(*event) } } + +impl workspace::SerializableItem for WelcomePage { + fn serialized_item_kind() -> &'static str { + "WelcomePage" + } + + fn cleanup( + workspace_id: workspace::WorkspaceId, + alive_items: Vec, + _window: &mut Window, + cx: &mut App, + ) -> Task> { + workspace::delete_unloaded_items( + alive_items, + workspace_id, + "welcome_pages", + &persistence::WELCOME_PAGES, + cx, + ) + } + + fn deserialize( + _project: Entity, + _workspace: gpui::WeakEntity, + workspace_id: workspace::WorkspaceId, + item_id: workspace::ItemId, + window: &mut Window, + cx: &mut App, + ) -> Task>> { + if persistence::WELCOME_PAGES + .get_welcome_page(item_id, workspace_id) + .ok() + .is_some_and(|is_open| is_open) + { + window.spawn(cx, async move |cx| cx.update(WelcomePage::new)) + } else { + Task::ready(Err(anyhow::anyhow!("No welcome page to deserialize"))) + } + } + + fn serialize( + &mut self, + workspace: &mut workspace::Workspace, + item_id: workspace::ItemId, + _closing: bool, + _window: &mut Window, + cx: &mut Context, + ) -> Option>> { + let workspace_id = workspace.database_id()?; + Some(cx.background_spawn(async move { + persistence::WELCOME_PAGES + .save_welcome_page(item_id, workspace_id, true) + .await + })) + } + + fn should_serialize(&self, event: &Self::Event) -> bool { + event == &ItemEvent::UpdateTab + } +} + +mod persistence { + use db::{define_connection, query, sqlez_macros::sql}; + use workspace::WorkspaceDb; + + define_connection! 
{ + pub static ref WELCOME_PAGES: WelcomePagesDb = + &[ + sql!( + CREATE TABLE welcome_pages ( + workspace_id INTEGER, + item_id INTEGER UNIQUE, + is_open INTEGER DEFAULT FALSE, + + PRIMARY KEY(workspace_id, item_id), + FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) + ON DELETE CASCADE + ) STRICT; + ), + ]; + } + + impl WelcomePagesDb { + query! { + pub async fn save_welcome_page( + item_id: workspace::ItemId, + workspace_id: workspace::WorkspaceId, + is_open: bool + ) -> Result<()> { + INSERT OR REPLACE INTO welcome_pages(item_id, workspace_id, is_open) + VALUES (?, ?, ?) + } + } + + query! { + pub fn get_welcome_page( + item_id: workspace::ItemId, + workspace_id: workspace::WorkspaceId + ) -> Result { + SELECT is_open + FROM welcome_pages + WHERE item_id = ? AND workspace_id = ? + } + } + } +} diff --git a/crates/open_ai/Cargo.toml b/crates/open_ai/Cargo.toml index 2d40cd2735a6be892f3373f87ad5be42f995d759..bae00f0a8e888390cc8be4909e752567b94d1a27 100644 --- a/crates/open_ai/Cargo.toml +++ b/crates/open_ai/Cargo.toml @@ -20,6 +20,7 @@ anyhow.workspace = true futures.workspace = true http_client.workspace = true schemars = { workspace = true, optional = true } +log.workspace = true serde.workspace = true serde_json.workspace = true strum.workspace = true diff --git a/crates/open_ai/src/open_ai.rs b/crates/open_ai/src/open_ai.rs index 4697d71ed337a94ad3c2a80cda3e50923042aa23..acf6ec434a013c711e0c77be3658913f9c6db7cb 100644 --- a/crates/open_ai/src/open_ai.rs +++ b/crates/open_ai/src/open_ai.rs @@ -9,7 +9,7 @@ use strum::EnumIter; pub const OPEN_AI_API_URL: &str = "https://api.openai.com/v1"; fn is_none_or_empty, U>(opt: &Option) -> bool { - opt.as_ref().map_or(true, |v| v.as_ref().is_empty()) + opt.as_ref().is_none_or(|v| v.as_ref().is_empty()) } #[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)] @@ -89,11 +89,13 @@ pub enum Model { max_tokens: u64, max_output_tokens: Option, max_completion_tokens: Option, + reasoning_effort: Option, }, } impl Model { pub fn default_fast() -> Self { + // TODO: Replace with FiveMini since all other models are deprecated Self::FourPointOneMini } @@ -206,6 +208,15 @@ impl Model { } } + pub fn reasoning_effort(&self) -> Option { + match self { + Self::Custom { + reasoning_effort, .. + } => reasoning_effort.to_owned(), + _ => None, + } + } + /// Returns whether the given model supports the `parallel_tool_calls` parameter. /// /// If the model does not support the parameter, do not pass it up, or the API will return an error. @@ -225,6 +236,13 @@ impl Model { Self::O1 | Self::O3 | Self::O3Mini | Self::O4Mini | Model::Custom { .. } => false, } } + + /// Returns whether the given model supports the `prompt_cache_key` parameter. + /// + /// If the model does not support the parameter, do not pass it up. 
+ pub fn supports_prompt_cache_key(&self) -> bool { + true + } } #[derive(Debug, Serialize, Deserialize)] @@ -244,6 +262,10 @@ pub struct Request { pub parallel_tool_calls: Option, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub tools: Vec, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub prompt_cache_key: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub reasoning_effort: Option, } #[derive(Debug, Serialize, Deserialize)] @@ -255,6 +277,16 @@ pub enum ToolChoice { Other(ToolDefinition), } +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)] +#[serde(rename_all = "lowercase")] +pub enum ReasoningEffort { + Minimal, + Low, + Medium, + High, +} + #[derive(Clone, Deserialize, Serialize, Debug)] #[serde(tag = "type", rename_all = "snake_case")] pub enum ToolDefinition { @@ -400,11 +432,16 @@ pub struct ChoiceDelta { pub finish_reason: Option, } +#[derive(Serialize, Deserialize, Debug)] +pub struct OpenAiError { + message: String, +} + #[derive(Serialize, Deserialize, Debug)] #[serde(untagged)] pub enum ResponseStreamResult { Ok(ResponseStreamEvent), - Err { error: String }, + Err { error: OpenAiError }, } #[derive(Serialize, Deserialize, Debug)] @@ -443,9 +480,17 @@ pub async fn stream_completion( match serde_json::from_str(line) { Ok(ResponseStreamResult::Ok(response)) => Some(Ok(response)), Ok(ResponseStreamResult::Err { error }) => { + Some(Err(anyhow!(error.message))) + } + Err(error) => { + log::error!( + "Failed to parse OpenAI response into ResponseStreamResult: `{}`\n\ + Response: `{}`", + error, + line, + ); Some(Err(anyhow!(error))) } - Err(error) => Some(Err(anyhow!(error))), } } } @@ -462,11 +507,6 @@ pub async fn stream_completion( error: OpenAiError, } - #[derive(Deserialize)] - struct OpenAiError { - message: String, - } - match serde_json::from_str::(&body) { Ok(response) if !response.error.message.is_empty() => Err(anyhow!( "API request to {} failed: {}", diff --git a/crates/open_router/src/open_router.rs b/crates/open_router/src/open_router.rs index 3e6e406d9842d5996f2e866d534094ded23fd61c..b7e6d69d8fbe7c342e833cd13ad069399fb44a26 100644 --- a/crates/open_router/src/open_router.rs +++ b/crates/open_router/src/open_router.rs @@ -8,7 +8,7 @@ use std::convert::TryFrom; pub const OPEN_ROUTER_API_URL: &str = "https://openrouter.ai/api/v1"; fn is_none_or_empty, U>(opt: &Option) -> bool { - opt.as_ref().map_or(true, |v| v.as_ref().is_empty()) + opt.as_ref().is_none_or(|v| v.as_ref().is_empty()) } #[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)] @@ -240,10 +240,10 @@ impl MessageContent { impl From> for MessageContent { fn from(parts: Vec) -> Self { - if parts.len() == 1 { - if let MessagePart::Text { text } = &parts[0] { - return Self::Plain(text.clone()); - } + if parts.len() == 1 + && let MessagePart::Text { text } = &parts[0] + { + return Self::Plain(text.clone()); } Self::Multipart(parts) } diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 1cda3897ec356c76b8abf4751bad6c35873c1300..10698cead8656885d0ea2d2f98ebd235e29fec1b 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -503,16 +503,16 @@ impl SearchData { && multi_buffer_snapshot .chars_at(extended_context_left_border) .last() - .map_or(false, |c| !c.is_whitespace()); + .is_some_and(|c| !c.is_whitespace()); let truncated_right = entire_context_text .chars() .last() - 
.map_or(true, |c| !c.is_whitespace()) + .is_none_or(|c| !c.is_whitespace()) && extended_context_right_border > context_right_border && multi_buffer_snapshot .chars_at(extended_context_right_border) .next() - .map_or(false, |c| !c.is_whitespace()); + .is_some_and(|c| !c.is_whitespace()); search_match_indices.iter_mut().for_each(|range| { range.start = multi_buffer_snapshot.clip_offset( range.start.saturating_sub(left_whitespaces_offset), @@ -733,7 +733,8 @@ impl OutlinePanel { ) -> Entity { let project = workspace.project().clone(); let workspace_handle = cx.entity().downgrade(); - let outline_panel = cx.new(|cx| { + + cx.new(|cx| { let filter_editor = cx.new(|cx| { let mut editor = Editor::single_line(window, cx); editor.set_placeholder_text("Filter...", cx); @@ -912,9 +913,7 @@ impl OutlinePanel { outline_panel.replace_active_editor(item, editor, window, cx); } outline_panel - }); - - outline_panel + }) } fn serialization_key(workspace: &Workspace) -> Option { @@ -1170,12 +1169,11 @@ impl OutlinePanel { }); } else { let mut offset = Point::default(); - if let Some(buffer_id) = scroll_to_buffer { - if multi_buffer_snapshot.as_singleton().is_none() - && !active_editor.read(cx).is_buffer_folded(buffer_id, cx) - { - offset.y = -(active_editor.read(cx).file_header_size() as f32); - } + if let Some(buffer_id) = scroll_to_buffer + && multi_buffer_snapshot.as_singleton().is_none() + && !active_editor.read(cx).is_buffer_folded(buffer_id, cx) + { + offset.y = -(active_editor.read(cx).file_header_size() as f32); } active_editor.update(cx, |editor, cx| { @@ -1260,7 +1258,7 @@ impl OutlinePanel { dirs_worktree_id == worktree_id && dirs .last() - .map_or(false, |dir| dir.path.as_ref() == parent_path) + .is_some_and(|dir| dir.path.as_ref() == parent_path) } _ => false, }) @@ -1454,9 +1452,7 @@ impl OutlinePanel { if self .unfolded_dirs .get(&directory_worktree) - .map_or(true, |unfolded_dirs| { - !unfolded_dirs.contains(&directory_entry.id) - }) + .is_none_or(|unfolded_dirs| !unfolded_dirs.contains(&directory_entry.id)) { return false; } @@ -1606,16 +1602,14 @@ impl OutlinePanel { } PanelEntry::FoldedDirs(folded_dirs) => { let mut folded = false; - if let Some(dir_entry) = folded_dirs.entries.last() { - if self + if let Some(dir_entry) = folded_dirs.entries.last() + && self .collapsed_entries .insert(CollapsedEntry::Dir(folded_dirs.worktree_id, dir_entry.id)) - { - folded = true; - buffers_to_fold.extend( - self.buffers_inside_directory(folded_dirs.worktree_id, dir_entry), - ); - } + { + folded = true; + buffers_to_fold + .extend(self.buffers_inside_directory(folded_dirs.worktree_id, dir_entry)); } folded } @@ -2108,11 +2102,11 @@ impl OutlinePanel { dirs_to_expand.push(current_entry.id); } - if traversal.back_to_parent() { - if let Some(parent_entry) = traversal.entry() { - current_entry = parent_entry.clone(); - continue; - } + if traversal.back_to_parent() + && let Some(parent_entry) = traversal.entry() + { + current_entry = parent_entry.clone(); + continue; } break; } @@ -2159,7 +2153,7 @@ impl OutlinePanel { ExcerptOutlines::Invalidated(outlines) => Some(outlines), ExcerptOutlines::NotFetched => None, }) - .map_or(false, |outlines| !outlines.is_empty()); + .is_some_and(|outlines| !outlines.is_empty()); let is_expanded = !self .collapsed_entries .contains(&CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)); @@ -2475,17 +2469,17 @@ impl OutlinePanel { let search_data = match render_data.get() { Some(search_data) => search_data, None => { - if let ItemsDisplayMode::Search(search_state) = &mut 
self.mode { - if let Some(multi_buffer_snapshot) = multi_buffer_snapshot { - search_state - .highlight_search_match_tx - .try_send(HighlightArguments { - multi_buffer_snapshot: multi_buffer_snapshot.clone(), - match_range: match_range.clone(), - search_data: Arc::clone(render_data), - }) - .ok(); - } + if let ItemsDisplayMode::Search(search_state) = &mut self.mode + && let Some(multi_buffer_snapshot) = multi_buffer_snapshot + { + search_state + .highlight_search_match_tx + .try_send(HighlightArguments { + multi_buffer_snapshot: multi_buffer_snapshot.clone(), + match_range: match_range.clone(), + search_data: Arc::clone(render_data), + }) + .ok(); } return None; } @@ -2629,7 +2623,7 @@ impl OutlinePanel { } fn entry_name(&self, worktree_id: &WorktreeId, entry: &Entry, cx: &App) -> String { - let name = match self.project.read(cx).worktree_for_id(*worktree_id, cx) { + match self.project.read(cx).worktree_for_id(*worktree_id, cx) { Some(worktree) => { let worktree = worktree.read(cx); match worktree.snapshot().root_entry() { @@ -2650,8 +2644,7 @@ impl OutlinePanel { } } None => file_name(entry.path.as_ref()), - }; - name + } } fn update_fs_entries( @@ -2686,7 +2679,8 @@ impl OutlinePanel { new_collapsed_entries = outline_panel.collapsed_entries.clone(); new_unfolded_dirs = outline_panel.unfolded_dirs.clone(); let multi_buffer_snapshot = active_multi_buffer.read(cx).snapshot(cx); - let buffer_excerpts = multi_buffer_snapshot.excerpts().fold( + + multi_buffer_snapshot.excerpts().fold( HashMap::default(), |mut buffer_excerpts, (excerpt_id, buffer_snapshot, excerpt_range)| { let buffer_id = buffer_snapshot.remote_id(); @@ -2733,8 +2727,7 @@ impl OutlinePanel { ); buffer_excerpts }, - ); - buffer_excerpts + ) }) else { return; }; @@ -2833,11 +2826,12 @@ impl OutlinePanel { let new_entry_added = entries_to_add .insert(current_entry.id, current_entry) .is_none(); - if new_entry_added && traversal.back_to_parent() { - if let Some(parent_entry) = traversal.entry() { - current_entry = parent_entry.to_owned(); - continue; - } + if new_entry_added + && traversal.back_to_parent() + && let Some(parent_entry) = traversal.entry() + { + current_entry = parent_entry.to_owned(); + continue; } break; } @@ -2878,18 +2872,17 @@ impl OutlinePanel { entries .into_iter() .filter_map(|entry| { - if auto_fold_dirs { - if let Some(parent) = entry.path.parent() { - let children = new_children_count - .entry(worktree_id) - .or_default() - .entry(Arc::from(parent)) - .or_default(); - if entry.is_dir() { - children.dirs += 1; - } else { - children.files += 1; - } + if auto_fold_dirs && let Some(parent) = entry.path.parent() + { + let children = new_children_count + .entry(worktree_id) + .or_default() + .entry(Arc::from(parent)) + .or_default(); + if entry.is_dir() { + children.dirs += 1; + } else { + children.files += 1; } } @@ -2956,7 +2949,7 @@ impl OutlinePanel { .map(|(parent_dir_id, _)| { new_unfolded_dirs .get(&directory.worktree_id) - .map_or(true, |unfolded_dirs| { + .is_none_or(|unfolded_dirs| { unfolded_dirs .contains(parent_dir_id) }) @@ -3409,30 +3402,29 @@ impl OutlinePanel { { excerpt.outlines = ExcerptOutlines::Outlines(fetched_outlines); - if let Some(default_depth) = pending_default_depth { - if let ExcerptOutlines::Outlines(outlines) = + if let Some(default_depth) = pending_default_depth + && let ExcerptOutlines::Outlines(outlines) = &excerpt.outlines - { - outlines - .iter() - .filter(|outline| { - (default_depth == 0 - || outline.depth >= default_depth) - && outlines_with_children.contains(&( - 
outline.range.clone(), - outline.depth, - )) - }) - .for_each(|outline| { - outline_panel.collapsed_entries.insert( - CollapsedEntry::Outline( - buffer_id, - excerpt_id, - outline.range.clone(), - ), - ); - }); - } + { + outlines + .iter() + .filter(|outline| { + (default_depth == 0 + || outline.depth >= default_depth) + && outlines_with_children.contains(&( + outline.range.clone(), + outline.depth, + )) + }) + .for_each(|outline| { + outline_panel.collapsed_entries.insert( + CollapsedEntry::Outline( + buffer_id, + excerpt_id, + outline.range.clone(), + ), + ); + }); } // Even if no outlines to check, we still need to update cached entries @@ -3448,9 +3440,8 @@ impl OutlinePanel { } fn is_singleton_active(&self, cx: &App) -> bool { - self.active_editor().map_or(false, |active_editor| { - active_editor.read(cx).buffer().read(cx).is_singleton() - }) + self.active_editor() + .is_some_and(|active_editor| active_editor.read(cx).buffer().read(cx).is_singleton()) } fn invalidate_outlines(&mut self, ids: &[ExcerptId]) { @@ -3611,10 +3602,9 @@ impl OutlinePanel { .update_in(cx, |outline_panel, window, cx| { outline_panel.cached_entries = new_cached_entries; outline_panel.max_width_item_index = max_width_item_index; - if outline_panel.selected_entry.is_invalidated() - || matches!(outline_panel.selected_entry, SelectedEntry::None) - { - if let Some(new_selected_entry) = + if (outline_panel.selected_entry.is_invalidated() + || matches!(outline_panel.selected_entry, SelectedEntry::None)) + && let Some(new_selected_entry) = outline_panel.active_editor().and_then(|active_editor| { outline_panel.location_for_editor_selection( &active_editor, @@ -3622,9 +3612,8 @@ impl OutlinePanel { cx, ) }) - { - outline_panel.select_entry(new_selected_entry, false, window, cx); - } + { + outline_panel.select_entry(new_selected_entry, false, window, cx); } outline_panel.autoscroll(cx); @@ -3670,7 +3659,7 @@ impl OutlinePanel { let is_root = project .read(cx) .worktree_for_id(directory_entry.worktree_id, cx) - .map_or(false, |worktree| { + .is_some_and(|worktree| { worktree.read(cx).root_entry() == Some(&directory_entry.entry) }); let folded = auto_fold_dirs @@ -3678,7 +3667,7 @@ impl OutlinePanel { && outline_panel .unfolded_dirs .get(&directory_entry.worktree_id) - .map_or(true, |unfolded_dirs| { + .is_none_or(|unfolded_dirs| { !unfolded_dirs.contains(&directory_entry.entry.id) }); let fs_depth = outline_panel @@ -3758,7 +3747,7 @@ impl OutlinePanel { .iter() .rev() .nth(folded_dirs.entries.len() + 1) - .map_or(true, |parent| parent.expanded); + .is_none_or(|parent| parent.expanded); if start_of_collapsed_dir_sequence || parent_expanded || query.is_some() @@ -3818,7 +3807,7 @@ impl OutlinePanel { .iter() .all(|entry| entry.path != parent.path) }) - .map_or(true, |parent| parent.expanded); + .is_none_or(|parent| parent.expanded); if !is_singleton && (parent_expanded || query.is_some()) { outline_panel.push_entry( &mut generation_state, @@ -3843,7 +3832,7 @@ impl OutlinePanel { .iter() .all(|entry| entry.path != parent.path) }) - .map_or(true, |parent| parent.expanded); + .is_none_or(|parent| parent.expanded); if !is_singleton && (parent_expanded || query.is_some()) { outline_panel.push_entry( &mut generation_state, @@ -3921,19 +3910,19 @@ impl OutlinePanel { } else { None }; - if let Some((buffer_id, entry_excerpts)) = excerpts_to_consider { - if !active_editor.read(cx).is_buffer_folded(buffer_id, cx) { - outline_panel.add_excerpt_entries( - &mut generation_state, - buffer_id, - entry_excerpts, - depth, - track_matches, 
- is_singleton, - query.as_deref(), - cx, - ); - } + if let Some((buffer_id, entry_excerpts)) = excerpts_to_consider + && !active_editor.read(cx).is_buffer_folded(buffer_id, cx) + { + outline_panel.add_excerpt_entries( + &mut generation_state, + buffer_id, + entry_excerpts, + depth, + track_matches, + is_singleton, + query.as_deref(), + cx, + ); } } } @@ -3964,7 +3953,7 @@ impl OutlinePanel { .iter() .all(|entry| entry.path != parent.path) }) - .map_or(true, |parent| parent.expanded); + .is_none_or(|parent| parent.expanded); if parent_expanded || query.is_some() { outline_panel.push_entry( &mut generation_state, @@ -4404,15 +4393,16 @@ impl OutlinePanel { }) .filter(|(match_range, _)| { let editor = active_editor.read(cx); - if let Some(buffer_id) = match_range.start.buffer_id { - if editor.is_buffer_folded(buffer_id, cx) { - return false; - } + let snapshot = editor.buffer().read(cx).snapshot(cx); + if let Some(buffer_id) = snapshot.buffer_id_for_anchor(match_range.start) + && editor.is_buffer_folded(buffer_id, cx) + { + return false; } - if let Some(buffer_id) = match_range.start.buffer_id { - if editor.is_buffer_folded(buffer_id, cx) { - return false; - } + if let Some(buffer_id) = snapshot.buffer_id_for_anchor(match_range.end) + && editor.is_buffer_folded(buffer_id, cx) + { + return false; } true }); @@ -4444,7 +4434,7 @@ impl OutlinePanel { } fn should_replace_active_item(&self, new_active_item: &dyn ItemHandle) -> bool { - self.active_item().map_or(true, |active_item| { + self.active_item().is_none_or(|active_item| { !self.pinned && active_item.item_id() != new_active_item.item_id() }) } @@ -4456,16 +4446,14 @@ impl OutlinePanel { cx: &mut Context, ) { self.pinned = !self.pinned; - if !self.pinned { - if let Some((active_item, active_editor)) = self + if !self.pinned + && let Some((active_item, active_editor)) = self .workspace .upgrade() .and_then(|workspace| workspace_active_editor(workspace.read(cx), cx)) - { - if self.should_replace_active_item(active_item.as_ref()) { - self.replace_active_editor(active_item, active_editor, window, cx); - } - } + && self.should_replace_active_item(active_item.as_ref()) + { + self.replace_active_editor(active_item, active_editor, window, cx); } cx.notify(); @@ -4815,51 +4803,45 @@ impl OutlinePanel { .when(show_indent_guides, |list| { list.with_decoration( ui::indent_guides(px(indent_size), IndentGuideColors::panel(cx)) - .with_compute_indents_fn( - cx.entity().clone(), - |outline_panel, range, _, _| { - let entries = outline_panel.cached_entries.get(range); - if let Some(entries) = entries { - entries.into_iter().map(|item| item.depth).collect() - } else { - smallvec::SmallVec::new() - } - }, - ) - .with_render_fn( - cx.entity().clone(), - move |outline_panel, params, _, _| { - const LEFT_OFFSET: Pixels = px(14.); - - let indent_size = params.indent_size; - let item_height = params.item_height; - let active_indent_guide_ix = find_active_indent_guide_ix( - outline_panel, - ¶ms.indent_guides, - ); + .with_compute_indents_fn(cx.entity(), |outline_panel, range, _, _| { + let entries = outline_panel.cached_entries.get(range); + if let Some(entries) = entries { + entries.iter().map(|item| item.depth).collect() + } else { + smallvec::SmallVec::new() + } + }) + .with_render_fn(cx.entity(), move |outline_panel, params, _, _| { + const LEFT_OFFSET: Pixels = px(14.); + + let indent_size = params.indent_size; + let item_height = params.item_height; + let active_indent_guide_ix = find_active_indent_guide_ix( + outline_panel, + ¶ms.indent_guides, + ); - params 
- .indent_guides - .into_iter() - .enumerate() - .map(|(ix, layout)| { - let bounds = Bounds::new( - point( - layout.offset.x * indent_size + LEFT_OFFSET, - layout.offset.y * item_height, - ), - size(px(1.), layout.length * item_height), - ); - ui::RenderedIndentGuide { - bounds, - layout, - is_active: active_indent_guide_ix == Some(ix), - hitbox: None, - } - }) - .collect() - }, - ), + params + .indent_guides + .into_iter() + .enumerate() + .map(|(ix, layout)| { + let bounds = Bounds::new( + point( + layout.offset.x * indent_size + LEFT_OFFSET, + layout.offset.y * item_height, + ), + size(px(1.), layout.length * item_height), + ); + ui::RenderedIndentGuide { + bounds, + layout, + is_active: active_indent_guide_ix == Some(ix), + hitbox: None, + } + }) + .collect() + }), ) }) }; @@ -5073,24 +5055,23 @@ impl Panel for OutlinePanel { let old_active = outline_panel.active; outline_panel.active = active; if old_active != active { - if active { - if let Some((active_item, active_editor)) = + if active + && let Some((active_item, active_editor)) = outline_panel.workspace.upgrade().and_then(|workspace| { workspace_active_editor(workspace.read(cx), cx) }) - { - if outline_panel.should_replace_active_item(active_item.as_ref()) { - outline_panel.replace_active_editor( - active_item, - active_editor, - window, - cx, - ); - } else { - outline_panel.update_fs_entries(active_editor, None, window, cx) - } - return; + { + if outline_panel.should_replace_active_item(active_item.as_ref()) { + outline_panel.replace_active_editor( + active_item, + active_editor, + window, + cx, + ); + } else { + outline_panel.update_fs_entries(active_editor, None, window, cx) } + return; } if !outline_panel.pinned { @@ -5111,7 +5092,7 @@ impl Panel for OutlinePanel { impl Focusable for OutlinePanel { fn focus_handle(&self, cx: &App) -> FocusHandle { - self.filter_editor.focus_handle(cx).clone() + self.filter_editor.focus_handle(cx) } } @@ -5325,8 +5306,8 @@ fn subscribe_for_editor_events( }) .copied(), ); - if !ignore_selections_change { - if let Some(entry_to_select) = latest_unfolded_buffer_id + if !ignore_selections_change + && let Some(entry_to_select) = latest_unfolded_buffer_id .or(latest_folded_buffer_id) .and_then(|toggled_buffer_id| { outline_panel.fs_entries.iter().find_map( @@ -5350,16 +5331,15 @@ fn subscribe_for_editor_events( ) }) .map(PanelEntry::Fs) - { - outline_panel.select_entry(entry_to_select, true, window, cx); - } + { + outline_panel.select_entry(entry_to_select, true, window, cx); } outline_panel.update_fs_entries(editor.clone(), debounce, window, cx); } EditorEvent::Reparsed(buffer_id) => { if let Some(excerpts) = outline_panel.excerpts.get_mut(buffer_id) { - for (_, excerpt) in excerpts { + for excerpt in excerpts.values_mut() { excerpt.invalidate_outlines(); } } @@ -5504,7 +5484,7 @@ mod tests { assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -5520,7 +5500,7 @@ mod tests { assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -5538,7 +5518,7 @@ mod tests { assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -5575,7 +5555,7 @@ mod tests { assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), 
&outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -5589,7 +5569,7 @@ mod tests { assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -5608,7 +5588,7 @@ mod tests { assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -5636,7 +5616,7 @@ mod tests { assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -5724,7 +5704,7 @@ mod tests { assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, None, cx, @@ -5747,7 +5727,7 @@ mod tests { assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, None, cx, @@ -5773,7 +5753,7 @@ mod tests { assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, None, cx, @@ -5879,7 +5859,7 @@ mod tests { assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -5902,7 +5882,7 @@ mod tests { assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -5939,7 +5919,7 @@ mod tests { assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -5976,7 +5956,7 @@ mod tests { assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -6079,7 +6059,7 @@ mod tests { assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -6105,7 +6085,7 @@ mod tests { assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -6129,7 +6109,7 @@ mod tests { assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -6150,7 +6130,7 @@ mod tests { assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -6238,7 +6218,7 @@ struct OutlineEntryExcerpt { assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -6265,7 +6245,7 @@ outline: struct OutlineEntryExcerpt assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -6292,7 +6272,7 @@ outline: struct OutlineEntryExcerpt <==== selected assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, 
@@ -6319,7 +6299,7 @@ outline: struct OutlineEntryExcerpt assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -6346,7 +6326,7 @@ outline: struct OutlineEntryExcerpt assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -6373,7 +6353,7 @@ outline: struct OutlineEntryExcerpt assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -6400,7 +6380,7 @@ outline: struct OutlineEntryExcerpt <==== selected assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -6427,7 +6407,7 @@ outline: struct OutlineEntryExcerpt assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -6454,7 +6434,7 @@ outline: struct OutlineEntryExcerpt assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -6481,7 +6461,7 @@ outline: struct OutlineEntryExcerpt assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -6508,7 +6488,7 @@ outline: struct OutlineEntryExcerpt <==== selected assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -6614,7 +6594,7 @@ outline: struct OutlineEntryExcerpt assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -6651,7 +6631,7 @@ outline: struct OutlineEntryExcerpt assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -6679,7 +6659,7 @@ outline: struct OutlineEntryExcerpt assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -6711,7 +6691,7 @@ outline: struct OutlineEntryExcerpt assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -6742,7 +6722,7 @@ outline: struct OutlineEntryExcerpt assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -6870,7 +6850,7 @@ outline: struct OutlineEntryExcerpt .render_data .get_or_init(|| SearchData::new( &search_entry.match_range, - &multi_buffer_snapshot + multi_buffer_snapshot )) .context_text ) @@ -7261,7 +7241,7 @@ outline: struct OutlineEntryExcerpt assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -7320,7 +7300,7 @@ outline: fn main()" assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + 
&snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -7344,7 +7324,7 @@ outline: fn main()" assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -7409,7 +7389,7 @@ outline: fn main()" assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -7550,7 +7530,7 @@ outline: fn main()" assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -7588,7 +7568,7 @@ outline: fn main()" assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -7622,7 +7602,7 @@ outline: fn main()" assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, @@ -7654,7 +7634,7 @@ outline: fn main()" assert_eq!( display_entries( &project, - &snapshot(&outline_panel, cx), + &snapshot(outline_panel, cx), &outline_panel.cached_entries, outline_panel.selected_entry(), cx, diff --git a/crates/panel/src/panel.rs b/crates/panel/src/panel.rs index 658a51167ba7da3f02c49ab77b50e72dabbbae57..1930f654e9b632e52719103e5b0a399cfe94f70a 100644 --- a/crates/panel/src/panel.rs +++ b/crates/panel/src/panel.rs @@ -52,7 +52,7 @@ impl RenderOnce for PanelTab { pub fn panel_button(label: impl Into) -> ui::Button { let label = label.into(); - let id = ElementId::Name(label.clone().to_lowercase().replace(' ', "_").into()); + let id = ElementId::Name(label.to_lowercase().replace(' ', "_").into()); ui::Button::new(id, label) .label_size(ui::LabelSize::Small) .icon_size(ui::IconSize::Small) diff --git a/crates/paths/src/paths.rs b/crates/paths/src/paths.rs index 47a0f12c0634dbde48d015e4f577519babc67b34..aab0354c9696f8bcdde5fd4bb00bd3651ac4b888 100644 --- a/crates/paths/src/paths.rs +++ b/crates/paths/src/paths.rs @@ -41,7 +41,7 @@ pub fn remote_server_dir_relative() -> &'static Path { /// # Arguments /// /// * `dir` - The path to use as the custom data directory. This will be used as the base -/// directory for all user data, including databases, extensions, and logs. +/// directory for all user data, including databases, extensions, and logs. 
/// /// # Returns /// diff --git a/crates/picker/src/popover_menu.rs b/crates/picker/src/popover_menu.rs index d05308ee71e87a472ffcb33e9727ef74fae70602..baf0918fd6c8e20211d04a150af9220cb2d66839 100644 --- a/crates/picker/src/popover_menu.rs +++ b/crates/picker/src/popover_menu.rs @@ -85,7 +85,7 @@ where .menu(move |_window, _cx| Some(picker.clone())) .trigger_with_tooltip(self.trigger, self.tooltip) .anchor(self.anchor) - .when_some(self.handle.clone(), |menu, handle| menu.with_handle(handle)) + .when_some(self.handle, |menu, handle| menu.with_handle(handle)) .offset(gpui::Point { x: px(0.0), y: px(-2.0), diff --git a/crates/prettier/src/prettier.rs b/crates/prettier/src/prettier.rs index 33320e6845964932aa7bfe051f3ffe4fba1a6168..32e39d466f1a236da72b746fb4bf2a24b7300385 100644 --- a/crates/prettier/src/prettier.rs +++ b/crates/prettier/src/prettier.rs @@ -119,7 +119,7 @@ impl Prettier { None } }).any(|workspace_definition| { - workspace_definition == subproject_path.to_string_lossy() || PathMatcher::new(&[workspace_definition]).ok().map_or(false, |path_matcher| path_matcher.is_match(subproject_path)) + workspace_definition == subproject_path.to_string_lossy() || PathMatcher::new(&[workspace_definition]).ok().is_some_and(|path_matcher| path_matcher.is_match(subproject_path)) }) { anyhow::ensure!(has_prettier_in_node_modules(fs, &path_to_check).await?, "Path {path_to_check:?} is the workspace root for project in {closest_package_json_path:?}, but it has no prettier installed"); log::info!("Found prettier path {path_to_check:?} in the workspace root for project in {closest_package_json_path:?}"); @@ -185,11 +185,11 @@ impl Prettier { .metadata(&ignore_path) .await .with_context(|| format!("fetching metadata for {ignore_path:?}"))? + && !metadata.is_dir + && !metadata.is_symlink { - if !metadata.is_dir && !metadata.is_symlink { - log::info!("Found prettier ignore at {ignore_path:?}"); - return Ok(ControlFlow::Continue(Some(path_to_check))); - } + log::info!("Found prettier ignore at {ignore_path:?}"); + return Ok(ControlFlow::Continue(Some(path_to_check))); } match &closest_package_json_path { None => closest_package_json_path = Some(path_to_check.clone()), @@ -217,19 +217,19 @@ impl Prettier { workspace_definition == subproject_path.to_string_lossy() || PathMatcher::new(&[workspace_definition]) .ok() - .map_or(false, |path_matcher| { + .is_some_and(|path_matcher| { path_matcher.is_match(subproject_path) }) }) { let workspace_ignore = path_to_check.join(".prettierignore"); - if let Some(metadata) = fs.metadata(&workspace_ignore).await? { - if !metadata.is_dir { - log::info!( - "Found prettier ignore at workspace root {workspace_ignore:?}" - ); - return Ok(ControlFlow::Continue(Some(path_to_check))); - } + if let Some(metadata) = fs.metadata(&workspace_ignore).await? + && !metadata.is_dir + { + log::info!( + "Found prettier ignore at workspace root {workspace_ignore:?}" + ); + return Ok(ControlFlow::Continue(Some(path_to_check))); } } } @@ -549,18 +549,16 @@ async fn read_package_json( .metadata(&possible_package_json) .await .with_context(|| format!("fetching metadata for package json {possible_package_json:?}"))? 
+ && !package_json_metadata.is_dir + && !package_json_metadata.is_symlink { - if !package_json_metadata.is_dir && !package_json_metadata.is_symlink { - let package_json_contents = fs - .load(&possible_package_json) - .await - .with_context(|| format!("reading {possible_package_json:?} file contents"))?; - return serde_json::from_str::>( - &package_json_contents, - ) + let package_json_contents = fs + .load(&possible_package_json) + .await + .with_context(|| format!("reading {possible_package_json:?} file contents"))?; + return serde_json::from_str::>(&package_json_contents) .map(Some) .with_context(|| format!("parsing {possible_package_json:?} file contents")); - } } Ok(None) } diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index b8101e14f39b4faf54b76eaab955864e4ef82ae5..295bad6e596252cbbeecb36b587b696ccbab32a0 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -88,9 +88,18 @@ pub enum BufferStoreEvent { }, } -#[derive(Default, Debug)] +#[derive(Default, Debug, Clone)] pub struct ProjectTransaction(pub HashMap, language::Transaction>); +impl PartialEq for ProjectTransaction { + fn eq(&self, other: &Self) -> bool { + self.0.len() == other.0.len() + && self.0.iter().all(|(buffer, transaction)| { + other.0.get(buffer).is_some_and(|t| t.id == transaction.id) + }) + } +} + impl EventEmitter for BufferStore {} impl RemoteBufferStore { @@ -168,7 +177,7 @@ impl RemoteBufferStore { .with_context(|| { format!("no worktree found for id {}", file.worktree_id) })?; - buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?) + buffer_file = Some(Arc::new(File::from_proto(file, worktree, cx)?) as Arc); } Buffer::from_proto(replica_id, capability, state, buffer_file) @@ -234,7 +243,7 @@ impl RemoteBufferStore { } } } - return Ok(None); + Ok(None) } pub fn incomplete_buffer_ids(&self) -> Vec { @@ -413,13 +422,10 @@ impl LocalBufferStore { cx: &mut Context, ) { cx.subscribe(worktree, |this, worktree, event, cx| { - if worktree.read(cx).is_local() { - match event { - worktree::Event::UpdatedEntries(changes) => { - Self::local_worktree_entries_changed(this, &worktree, changes, cx); - } - _ => {} - } + if worktree.read(cx).is_local() + && let worktree::Event::UpdatedEntries(changes) = event + { + Self::local_worktree_entries_changed(this, &worktree, changes, cx); } }) .detach(); @@ -594,7 +600,7 @@ impl LocalBufferStore { else { return Task::ready(Err(anyhow!("no such worktree"))); }; - self.save_local_buffer(buffer, worktree, path.path.clone(), true, cx) + self.save_local_buffer(buffer, worktree, path.path, true, cx) } fn open_buffer( @@ -848,7 +854,7 @@ impl BufferStore { ) -> Task> { match &mut self.state { BufferStoreState::Local(this) => this.save_buffer(buffer, cx), - BufferStoreState::Remote(this) => this.save_remote_buffer(buffer.clone(), None, cx), + BufferStoreState::Remote(this) => this.save_remote_buffer(buffer, None, cx), } } @@ -947,10 +953,9 @@ impl BufferStore { } pub fn get_by_path(&self, path: &ProjectPath) -> Option> { - self.path_to_buffer_id.get(path).and_then(|buffer_id| { - let buffer = self.get(*buffer_id); - buffer - }) + self.path_to_buffer_id + .get(path) + .and_then(|buffer_id| self.get(*buffer_id)) } pub fn get(&self, buffer_id: BufferId) -> Option> { @@ -1094,10 +1099,10 @@ impl BufferStore { .collect::>() })?; for buffer_task in buffers { - if let Some(buffer) = buffer_task.await.log_err() { - if tx.send(buffer).await.is_err() { - return anyhow::Ok(()); - } + if let Some(buffer) = 
buffer_task.await.log_err() + && tx.send(buffer).await.is_err() + { + return anyhow::Ok(()); } } } @@ -1142,7 +1147,7 @@ impl BufferStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let payload = envelope.payload.clone(); + let payload = envelope.payload; let buffer_id = BufferId::new(payload.buffer_id)?; let ops = payload .operations @@ -1173,11 +1178,11 @@ impl BufferStore { buffer_id: BufferId, handle: OpenLspBufferHandle, ) { - if let Some(shared_buffers) = self.shared_buffers.get_mut(&peer_id) { - if let Some(buffer) = shared_buffers.get_mut(&buffer_id) { - buffer.lsp_handle = Some(handle); - return; - } + if let Some(shared_buffers) = self.shared_buffers.get_mut(&peer_id) + && let Some(buffer) = shared_buffers.get_mut(&buffer_id) + { + buffer.lsp_handle = Some(handle); + return; } debug_panic!("tried to register shared lsp handle, but buffer was not shared") } @@ -1313,10 +1318,7 @@ impl BufferStore { let new_path = file.path.clone(); buffer.file_updated(Arc::new(file), cx); - if old_file - .as_ref() - .map_or(true, |old| *old.path() != new_path) - { + if old_file.as_ref().is_none_or(|old| *old.path() != new_path) { Some(old_file) } else { None @@ -1345,7 +1347,7 @@ impl BufferStore { mut cx: AsyncApp, ) -> Result { let buffer_id = BufferId::new(envelope.payload.buffer_id)?; - let (buffer, project_id) = this.read_with(&mut cx, |this, _| { + let (buffer, project_id) = this.read_with(&cx, |this, _| { anyhow::Ok(( this.get_existing(buffer_id)?, this.downstream_client @@ -1359,7 +1361,7 @@ impl BufferStore { buffer.wait_for_version(deserialize_version(&envelope.payload.version)) })? .await?; - let buffer_id = buffer.read_with(&mut cx, |buffer, _| buffer.remote_id())?; + let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?; if let Some(new_path) = envelope.payload.new_path { let new_path = ProjectPath::from_proto(new_path); @@ -1372,7 +1374,7 @@ impl BufferStore { .await?; } - buffer.read_with(&mut cx, |buffer, _| proto::BufferSaved { + buffer.read_with(&cx, |buffer, _| proto::BufferSaved { project_id, buffer_id: buffer_id.into(), version: serialize_version(buffer.saved_version()), @@ -1388,14 +1390,14 @@ impl BufferStore { let peer_id = envelope.sender_id; let buffer_id = BufferId::new(envelope.payload.buffer_id)?; this.update(&mut cx, |this, cx| { - if let Some(shared) = this.shared_buffers.get_mut(&peer_id) { - if shared.remove(&buffer_id).is_some() { - cx.emit(BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id)); - if shared.is_empty() { - this.shared_buffers.remove(&peer_id); - } - return; + if let Some(shared) = this.shared_buffers.get_mut(&peer_id) + && shared.remove(&buffer_id).is_some() + { + cx.emit(BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id)); + if shared.is_empty() { + this.shared_buffers.remove(&peer_id); } + return; } debug_panic!( "peer_id {} closed buffer_id {} which was either not open or already closed", diff --git a/crates/project/src/color_extractor.rs b/crates/project/src/color_extractor.rs index 5473da88af5bee6e66b005956366a289478f7ee4..6e9907e30b7393a3074f4af579536d74140418f9 100644 --- a/crates/project/src/color_extractor.rs +++ b/crates/project/src/color_extractor.rs @@ -4,8 +4,8 @@ use gpui::{Hsla, Rgba}; use lsp::{CompletionItem, Documentation}; use regex::{Regex, RegexBuilder}; -const HEX: &'static str = r#"(#(?:[\da-fA-F]{3}){1,2})"#; -const RGB_OR_HSL: &'static str = r#"(rgba?|hsla?)\(\s*(\d{1,3}%?)\s*,\s*(\d{1,3}%?)\s*,\s*(\d{1,3}%?)\s*(?:,\s*(1|0?\.\d+))?\s*\)"#; +const HEX: &str = 
r#"(#(?:[\da-fA-F]{3}){1,2})"#; +const RGB_OR_HSL: &str = r#"(rgba?|hsla?)\(\s*(\d{1,3}%?)\s*,\s*(\d{1,3}%?)\s*,\s*(\d{1,3}%?)\s*(?:,\s*(1|0?\.\d+))?\s*\)"#; static RELAXED_HEX_REGEX: LazyLock = LazyLock::new(|| { RegexBuilder::new(HEX) @@ -102,7 +102,7 @@ fn parse(str: &str, mode: ParseMode) -> Option { }; } - return None; + None } fn parse_component(value: &str, max: f32) -> Option { @@ -141,7 +141,7 @@ mod tests { use gpui::rgba; use lsp::{CompletionItem, CompletionItemKind}; - pub const COLOR_TABLE: &[(&'static str, Option)] = &[ + pub const COLOR_TABLE: &[(&str, Option)] = &[ // -- Invalid -- // Invalid hex ("f0f", None), diff --git a/crates/project/src/context_server_store.rs b/crates/project/src/context_server_store.rs index c96ab4e8f3ba87133d9b64e9701130f5d32adfb9..49a430c26110fc58a9494d414ecbcf45a6c76c49 100644 --- a/crates/project/src/context_server_store.rs +++ b/crates/project/src/context_server_store.rs @@ -368,7 +368,7 @@ impl ContextServerStore { } pub fn restart_server(&mut self, id: &ContextServerId, cx: &mut Context) -> Result<()> { - if let Some(state) = self.servers.get(&id) { + if let Some(state) = self.servers.get(id) { let configuration = state.configuration(); self.stop_server(&state.server().id(), cx)?; @@ -397,9 +397,8 @@ impl ContextServerStore { let server = server.clone(); let configuration = configuration.clone(); async move |this, cx| { - match server.clone().start(&cx).await { + match server.clone().start(cx).await { Ok(_) => { - log::info!("Started {} context server", id); debug_assert!(server.client().is_some()); this.update(cx, |this, cx| { @@ -588,7 +587,7 @@ impl ContextServerStore { for server_id in this.servers.keys() { // All servers that are not in desired_servers should be removed from the store. // This can happen if the user removed a server from the context server settings. 
- if !configured_servers.contains_key(&server_id) { + if !configured_servers.contains_key(server_id) { if disabled_servers.contains_key(&server_id.0) { servers_to_stop.insert(server_id.clone()); } else { @@ -642,8 +641,8 @@ mod tests { #[gpui::test] async fn test_context_server_status(cx: &mut TestAppContext) { - const SERVER_1_ID: &'static str = "mcp-1"; - const SERVER_2_ID: &'static str = "mcp-2"; + const SERVER_1_ID: &str = "mcp-1"; + const SERVER_2_ID: &str = "mcp-2"; let (_fs, project) = setup_context_server_test( cx, @@ -722,8 +721,8 @@ mod tests { #[gpui::test] async fn test_context_server_status_events(cx: &mut TestAppContext) { - const SERVER_1_ID: &'static str = "mcp-1"; - const SERVER_2_ID: &'static str = "mcp-2"; + const SERVER_1_ID: &str = "mcp-1"; + const SERVER_2_ID: &str = "mcp-2"; let (_fs, project) = setup_context_server_test( cx, @@ -761,7 +760,7 @@ mod tests { &store, vec![ (server_1_id.clone(), ContextServerStatus::Starting), - (server_1_id.clone(), ContextServerStatus::Running), + (server_1_id, ContextServerStatus::Running), (server_2_id.clone(), ContextServerStatus::Starting), (server_2_id.clone(), ContextServerStatus::Running), (server_2_id.clone(), ContextServerStatus::Stopped), @@ -784,7 +783,7 @@ mod tests { #[gpui::test(iterations = 25)] async fn test_context_server_concurrent_starts(cx: &mut TestAppContext) { - const SERVER_1_ID: &'static str = "mcp-1"; + const SERVER_1_ID: &str = "mcp-1"; let (_fs, project) = setup_context_server_test( cx, @@ -845,8 +844,8 @@ mod tests { #[gpui::test] async fn test_context_server_maintain_servers_loop(cx: &mut TestAppContext) { - const SERVER_1_ID: &'static str = "mcp-1"; - const SERVER_2_ID: &'static str = "mcp-2"; + const SERVER_1_ID: &str = "mcp-1"; + const SERVER_2_ID: &str = "mcp-2"; let server_1_id = ContextServerId(SERVER_1_ID.into()); let server_2_id = ContextServerId(SERVER_2_ID.into()); @@ -1084,7 +1083,7 @@ mod tests { #[gpui::test] async fn test_context_server_enabled_disabled(cx: &mut TestAppContext) { - const SERVER_1_ID: &'static str = "mcp-1"; + const SERVER_1_ID: &str = "mcp-1"; let server_1_id = ContextServerId(SERVER_1_ID.into()); diff --git a/crates/project/src/context_server_store/extension.rs b/crates/project/src/context_server_store/extension.rs index 1eb0fe7da129ba9dbd3ee640cb6e02474a3990b6..2a3a0c2e4b99e56c66993d0db1fbec5b3fb9ef29 100644 --- a/crates/project/src/context_server_store/extension.rs +++ b/crates/project/src/context_server_store/extension.rs @@ -63,7 +63,7 @@ impl registry::ContextServerDescriptor for ContextServerDescriptor { .await?; command.command = extension.path_from_extension(&command.command); - log::info!("loaded command for context server {id}: {command:?}"); + log::debug!("loaded command for context server {id}: {command:?}"); Ok(ContextServerCommand { path: command.command, diff --git a/crates/project/src/debugger.rs b/crates/project/src/debugger.rs index 6c22468040097768688d93cde0720320a9e45be9..0bf6a0d61b792bd747992a821adc82150d93c8bf 100644 --- a/crates/project/src/debugger.rs +++ b/crates/project/src/debugger.rs @@ -6,9 +6,9 @@ //! //! There are few reasons for this divide: //! - Breakpoints persist across debug sessions and they're not really specific to any particular session. Sure, we have to send protocol messages for them -//! (so they're a "thing" in the protocol), but we also want to set them before any session starts up. +//! (so they're a "thing" in the protocol), but we also want to set them before any session starts up. //! 
- Debug clients are doing the heavy lifting, and this is where UI grabs all of it's data from. They also rely on breakpoint store during initialization to obtain -//! current set of breakpoints. +//! current set of breakpoints. //! - Since DAP store knows about all of the available debug sessions, it is responsible for routing RPC requests to sessions. It also knows how to find adapters for particular kind of session. pub mod breakpoint_store; diff --git a/crates/project/src/debugger/breakpoint_store.rs b/crates/project/src/debugger/breakpoint_store.rs index 025dca410069db0350d8d32509244a4889c62415..c47e5d35d5948eb0c176bbc6d14281faa3f60451 100644 --- a/crates/project/src/debugger/breakpoint_store.rs +++ b/crates/project/src/debugger/breakpoint_store.rs @@ -192,7 +192,7 @@ impl BreakpointStore { } pub(crate) fn shared(&mut self, project_id: u64, downstream_client: AnyProtoClient) { - self.downstream_client = Some((downstream_client.clone(), project_id)); + self.downstream_client = Some((downstream_client, project_id)); } pub(crate) fn unshared(&mut self, cx: &mut Context) { @@ -267,7 +267,7 @@ impl BreakpointStore { message: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let breakpoints = this.read_with(&mut cx, |this, _| this.breakpoint_store())?; + let breakpoints = this.read_with(&cx, |this, _| this.breakpoint_store())?; let path = this .update(&mut cx, |this, cx| { this.project_path_for_absolute_path(message.payload.path.as_ref(), cx) @@ -317,8 +317,8 @@ impl BreakpointStore { .iter() .filter_map(|breakpoint| { breakpoint.bp.bp.to_proto( - &path, - &breakpoint.position(), + path, + breakpoint.position(), &breakpoint.session_state, ) }) @@ -450,9 +450,9 @@ impl BreakpointStore { }); if let Some(found_bp) = found_bp { - found_bp.message = Some(log_message.clone()); + found_bp.message = Some(log_message); } else { - breakpoint.bp.message = Some(log_message.clone()); + breakpoint.bp.message = Some(log_message); // We did not remove any breakpoint, hence let's toggle one. breakpoint_set .breakpoints @@ -482,9 +482,9 @@ impl BreakpointStore { }); if let Some(found_bp) = found_bp { - found_bp.hit_condition = Some(hit_condition.clone()); + found_bp.hit_condition = Some(hit_condition); } else { - breakpoint.bp.hit_condition = Some(hit_condition.clone()); + breakpoint.bp.hit_condition = Some(hit_condition); // We did not remove any breakpoint, hence let's toggle one. breakpoint_set .breakpoints @@ -514,9 +514,9 @@ impl BreakpointStore { }); if let Some(found_bp) = found_bp { - found_bp.condition = Some(condition.clone()); + found_bp.condition = Some(condition); } else { - breakpoint.bp.condition = Some(condition.clone()); + breakpoint.bp.condition = Some(condition); // We did not remove any breakpoint, hence let's toggle one. 
breakpoint_set .breakpoints @@ -591,7 +591,7 @@ impl BreakpointStore { cx: &mut Context, ) { if let Some(breakpoints) = self.breakpoints.remove(&old_path) { - self.breakpoints.insert(new_path.clone(), breakpoints); + self.breakpoints.insert(new_path, breakpoints); cx.notify(); } @@ -623,12 +623,11 @@ impl BreakpointStore { file_breakpoints.breakpoints.iter().filter_map({ let range = range.clone(); move |bp| { - if let Some(range) = &range { - if bp.position().cmp(&range.start, buffer_snapshot).is_lt() - || bp.position().cmp(&range.end, buffer_snapshot).is_gt() - { - return None; - } + if let Some(range) = &range + && (bp.position().cmp(&range.start, buffer_snapshot).is_lt() + || bp.position().cmp(&range.end, buffer_snapshot).is_gt()) + { + return None; } let session_state = active_session_id .and_then(|id| bp.session_state.get(&id)) @@ -753,7 +752,7 @@ impl BreakpointStore { .iter() .map(|breakpoint| { let position = snapshot - .summary_for_anchor::(&breakpoint.position()) + .summary_for_anchor::(breakpoint.position()) .row; let breakpoint = &breakpoint.bp; SourceBreakpoint { @@ -832,7 +831,6 @@ impl BreakpointStore { new_breakpoints.insert(path, breakpoints_for_file); } this.update(cx, |this, cx| { - log::info!("Finish deserializing breakpoints & initializing breakpoint store"); for (path, count) in new_breakpoints.iter().map(|(path, bp_in_file)| { (path.to_string_lossy(), bp_in_file.breakpoints.len()) }) { @@ -906,7 +904,7 @@ impl BreakpointState { } #[inline] - pub fn to_int(&self) -> i32 { + pub fn to_int(self) -> i32 { match self { BreakpointState::Enabled => 0, BreakpointState::Disabled => 1, diff --git a/crates/project/src/debugger/dap_command.rs b/crates/project/src/debugger/dap_command.rs index 3be3192369452b58fd2382471ca2f41f4aeac75f..772ff2dcfeb98fcda794092f8071fad5c6fcdcd4 100644 --- a/crates/project/src/debugger/dap_command.rs +++ b/crates/project/src/debugger/dap_command.rs @@ -1454,7 +1454,7 @@ impl DapCommand for EvaluateCommand { variables_reference: message.variable_reference, named_variables: message.named_variables, indexed_variables: message.indexed_variables, - memory_reference: message.memory_reference.clone(), + memory_reference: message.memory_reference, value_location_reference: None, //TODO }) } diff --git a/crates/project/src/debugger/dap_store.rs b/crates/project/src/debugger/dap_store.rs index 6f834b5dc0cfd3fc6357d92403bdb7cbfefdd4b0..834bf2c2d2328e0c8b8d4ea211feb9bf255a0546 100644 --- a/crates/project/src/debugger/dap_store.rs +++ b/crates/project/src/debugger/dap_store.rs @@ -215,7 +215,7 @@ impl DapStore { dap_settings.and_then(|s| s.binary.as_ref().map(PathBuf::from)); let user_args = dap_settings.map(|s| s.args.clone()); - let delegate = self.delegate(&worktree, console, cx); + let delegate = self.delegate(worktree, console, cx); let cwd: Arc = worktree.read(cx).abs_path().as_ref().into(); cx.spawn(async move |this, cx| { @@ -470,9 +470,8 @@ impl DapStore { session_id: impl Borrow, ) -> Option> { let session_id = session_id.borrow(); - let client = self.sessions.get(session_id).cloned(); - client + self.sessions.get(session_id).cloned() } pub fn sessions(&self) -> impl Iterator> { self.sessions.values() @@ -685,7 +684,7 @@ impl DapStore { let shutdown_id = parent_session.update(cx, |parent_session, _| { parent_session.remove_child_session_id(session_id); - if parent_session.child_session_ids().len() == 0 { + if parent_session.child_session_ids().is_empty() { Some(parent_session.session_id()) } else { None @@ -702,7 +701,7 @@ impl DapStore { 
cx.emit(DapStoreEvent::DebugClientShutdown(session_id)); cx.background_spawn(async move { - if shutdown_children.len() > 0 { + if !shutdown_children.is_empty() { let _ = join_all(shutdown_children).await; } @@ -722,7 +721,7 @@ impl DapStore { downstream_client: AnyProtoClient, _: &mut Context, ) { - self.downstream_client = Some((downstream_client.clone(), project_id)); + self.downstream_client = Some((downstream_client, project_id)); } pub fn unshared(&mut self, cx: &mut Context) { @@ -902,7 +901,7 @@ impl dap::adapters::DapDelegate for DapAdapterDelegate { } fn worktree_root_path(&self) -> &Path { - &self.worktree.abs_path() + self.worktree.abs_path() } fn http_client(&self) -> Arc { self.http_client.clone() diff --git a/crates/project/src/debugger/locators/cargo.rs b/crates/project/src/debugger/locators/cargo.rs index fa265dae586148f9c8efe14187ee26c805c65e42..3e28fac8af22930876d096d5b7773a0825becf4f 100644 --- a/crates/project/src/debugger/locators/cargo.rs +++ b/crates/project/src/debugger/locators/cargo.rs @@ -146,7 +146,7 @@ impl DapLocator for CargoLocator { let is_test = build_config .args .first() - .map_or(false, |arg| arg == "test" || arg == "t"); + .is_some_and(|arg| arg == "test" || arg == "t"); let executables = output .lines() @@ -187,12 +187,12 @@ impl DapLocator for CargoLocator { .cloned(); } let executable = { - if let Some(ref name) = test_name.as_ref().and_then(|name| { + if let Some(name) = test_name.as_ref().and_then(|name| { name.strip_prefix('$') .map(|name| build_config.env.get(name)) .unwrap_or(Some(name)) }) { - find_best_executable(&executables, &name).await + find_best_executable(&executables, name).await } else { None } diff --git a/crates/project/src/debugger/locators/go.rs b/crates/project/src/debugger/locators/go.rs index 61436fce8f3659d4b12c3010b82e0d845654c4e9..eec06084ec78548e1a627080663d2afccc8a0aca 100644 --- a/crates/project/src/debugger/locators/go.rs +++ b/crates/project/src/debugger/locators/go.rs @@ -174,7 +174,7 @@ impl DapLocator for GoLocator { request: "launch".to_string(), mode: "test".to_string(), program, - args: args, + args, build_flags, cwd: build_config.cwd.clone(), env: build_config.env.clone(), @@ -185,7 +185,7 @@ impl DapLocator for GoLocator { label: resolved_label.to_string().into(), adapter: adapter.0.clone(), build: None, - config: config, + config, tcp_connection: None, }) } @@ -220,7 +220,7 @@ impl DapLocator for GoLocator { request: "launch".to_string(), mode: "debug".to_string(), program, - args: args, + args, build_flags, }) .unwrap(); diff --git a/crates/project/src/debugger/locators/python.rs b/crates/project/src/debugger/locators/python.rs index 3de1281aed36c6a96970d08e0e4f5cb0ef3bd67f..71efbb75b91c819fcfdb857769877452f9e5a730 100644 --- a/crates/project/src/debugger/locators/python.rs +++ b/crates/project/src/debugger/locators/python.rs @@ -28,9 +28,7 @@ impl DapLocator for PythonLocator { let valid_program = build_config.command.starts_with("$ZED_") || Path::new(&build_config.command) .file_name() - .map_or(false, |name| { - name.to_str().is_some_and(|path| path.starts_with("python")) - }); + .is_some_and(|name| name.to_str().is_some_and(|path| path.starts_with("python"))); if !valid_program || build_config.args.iter().any(|arg| arg == "-c") { // We cannot debug selections. 
return None; diff --git a/crates/project/src/debugger/memory.rs b/crates/project/src/debugger/memory.rs index fec3c344c5a433eebb3a1f314a8fd911bd603022..42ad64e6880ba653c6c95cb13f0e6bcc23c9bdae 100644 --- a/crates/project/src/debugger/memory.rs +++ b/crates/project/src/debugger/memory.rs @@ -3,6 +3,7 @@ //! Each byte in memory can either be mapped or unmapped. We try to mimic that twofold: //! - We assume that the memory is divided into pages of a fixed size. //! - We assume that each page can be either mapped or unmapped. +//! //! These two assumptions drive the shape of the memory representation. //! In particular, we want the unmapped pages to be represented without allocating any memory, as *most* //! of the memory in a program space is usually unmapped. @@ -165,8 +166,8 @@ impl Memory { /// - If it succeeds/fails wholesale, cool; we have no unknown memory regions in this page. /// - If it succeeds partially, we know # of mapped bytes. /// We might also know the # of unmapped bytes. -/// However, we're still unsure about what's *after* the unreadable region. /// +/// However, we're still unsure about what's *after* the unreadable region. /// This is where this builder comes in. It lets us track the state of figuring out contents of a single page. pub(super) struct MemoryPageBuilder { chunks: MappedPageContents, @@ -318,19 +319,18 @@ impl Iterator for MemoryIterator { return None; } if let Some((current_page_address, current_memory_chunk)) = self.current_known_page.as_mut() + && current_page_address.0 <= self.start { - if current_page_address.0 <= self.start { - if let Some(next_cell) = current_memory_chunk.next() { - self.start += 1; - return Some(next_cell); - } else { - self.current_known_page.take(); - } + if let Some(next_cell) = current_memory_chunk.next() { + self.start += 1; + return Some(next_cell); + } else { + self.current_known_page.take(); } } if !self.fetch_next_page() { self.start += 1; - return Some(MemoryCell(None)); + Some(MemoryCell(None)) } else { self.next() } diff --git a/crates/project/src/debugger/session.rs b/crates/project/src/debugger/session.rs index d9c28df497b3baa4543e6271106ddb1cd11b4419..81cb3ade2e18b6430c4b644495529c4567344da5 100644 --- a/crates/project/src/debugger/session.rs +++ b/crates/project/src/debugger/session.rs @@ -226,7 +226,7 @@ impl RunningMode { fn unset_breakpoints_from_paths(&self, paths: &Vec>, cx: &mut App) -> Task<()> { let tasks: Vec<_> = paths - .into_iter() + .iter() .map(|path| { self.request(dap_command::SetBreakpoints { source: client_source(path), @@ -431,7 +431,7 @@ impl RunningMode { let should_send_exception_breakpoints = capabilities .exception_breakpoint_filters .as_ref() - .map_or(false, |filters| !filters.is_empty()) + .is_some_and(|filters| !filters.is_empty()) || !configuration_done_supported; let supports_exception_filters = capabilities .supports_exception_filter_options @@ -508,13 +508,12 @@ impl RunningMode { .ok(); } - let ret = if configuration_done_supported { + if configuration_done_supported { this.request(ConfigurationDone {}) } else { Task::ready(Ok(())) } - .await; - ret + .await } }); @@ -710,9 +709,7 @@ where T: LocalDapCommand + PartialEq + Eq + Hash, { fn dyn_eq(&self, rhs: &dyn CacheableCommand) -> bool { - (rhs as &dyn Any) - .downcast_ref::() - .map_or(false, |rhs| self == rhs) + (rhs as &dyn Any).downcast_ref::() == Some(self) } fn dyn_hash(&self, mut hasher: &mut dyn Hasher) { @@ -841,7 +838,7 @@ impl Session { }) .detach(); - let this = Self { + Self { mode: SessionState::Booting(None), id: 
session_id, child_session_ids: HashSet::default(), @@ -870,9 +867,7 @@ impl Session { task_context, memory: memory::Memory::new(), quirks, - }; - - this + } }) } @@ -1085,7 +1080,7 @@ impl Session { }) .detach(); - return tx; + tx } pub fn is_started(&self) -> bool { @@ -1399,7 +1394,7 @@ impl Session { let breakpoint_store = self.breakpoint_store.clone(); if let Some((local, path)) = self.as_running_mut().and_then(|local| { let breakpoint = local.tmp_breakpoint.take()?; - let path = breakpoint.path.clone(); + let path = breakpoint.path; Some((local, path)) }) { local @@ -1630,7 +1625,7 @@ impl Session { + 'static, cx: &mut Context, ) -> Task> { - if !T::is_supported(&capabilities) { + if !T::is_supported(capabilities) { log::warn!( "Attempted to send a DAP request that isn't supported: {:?}", request @@ -1688,7 +1683,7 @@ impl Session { self.requests .entry((&*key.0 as &dyn Any).type_id()) .and_modify(|request_map| { - request_map.remove(&key); + request_map.remove(key); }); } @@ -1715,7 +1710,7 @@ impl Session { this.threads = result .into_iter() - .map(|thread| (ThreadId(thread.id), Thread::from(thread.clone()))) + .map(|thread| (ThreadId(thread.id), Thread::from(thread))) .collect(); this.invalidate_command_type::(); @@ -2558,10 +2553,7 @@ impl Session { mode: Option, cx: &mut Context, ) -> Task> { - let command = DataBreakpointInfoCommand { - context: context.clone(), - mode, - }; + let command = DataBreakpointInfoCommand { context, mode }; self.request(command, |_, response, _| response.ok(), cx) } diff --git a/crates/project/src/environment.rs b/crates/project/src/environment.rs index 7379a7ef726c6004fc2b29a5b61a47cb9603fbb3..d109e307a89181f0a416d6d01a3fa74684a138a7 100644 --- a/crates/project/src/environment.rs +++ b/crates/project/src/environment.rs @@ -198,7 +198,7 @@ async fn load_directory_shell_environment( ); }; - load_shell_environment(&dir, load_direnv).await + load_shell_environment(dir, load_direnv).await } Err(err) => ( None, diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 01fc987816447340bcec77e53ddf77ff72146be9..5cf298a8bff1dbecbe5899020e4463ba658ed719 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -414,6 +414,7 @@ impl GitStore { pub fn init(client: &AnyProtoClient) { client.add_entity_request_handler(Self::handle_get_remotes); client.add_entity_request_handler(Self::handle_get_branches); + client.add_entity_request_handler(Self::handle_get_default_branch); client.add_entity_request_handler(Self::handle_change_branch); client.add_entity_request_handler(Self::handle_create_branch); client.add_entity_request_handler(Self::handle_git_init); @@ -441,6 +442,7 @@ impl GitStore { client.add_entity_request_handler(Self::handle_blame_buffer); client.add_entity_message_handler(Self::handle_update_repository); client.add_entity_message_handler(Self::handle_remove_repository); + client.add_entity_request_handler(Self::handle_git_clone); } pub fn is_local(&self) -> bool { @@ -559,7 +561,7 @@ impl GitStore { pub fn active_repository(&self) -> Option> { self.active_repo_id .as_ref() - .map(|id| self.repositories[&id].clone()) + .map(|id| self.repositories[id].clone()) } pub fn open_unstaged_diff( @@ -568,23 +570,22 @@ impl GitStore { cx: &mut Context, ) -> Task>> { let buffer_id = buffer.read(cx).remote_id(); - if let Some(diff_state) = self.diffs.get(&buffer_id) { - if let Some(unstaged_diff) = diff_state + if let Some(diff_state) = self.diffs.get(&buffer_id) + && let Some(unstaged_diff) = diff_state 
.read(cx) .unstaged_diff .as_ref() .and_then(|weak| weak.upgrade()) + { + if let Some(task) = + diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation()) { - if let Some(task) = - diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation()) - { - return cx.background_executor().spawn(async move { - task.await; - Ok(unstaged_diff) - }); - } - return Task::ready(Ok(unstaged_diff)); + return cx.background_executor().spawn(async move { + task.await; + Ok(unstaged_diff) + }); } + return Task::ready(Ok(unstaged_diff)); } let Some((repo, repo_path)) = @@ -625,23 +626,22 @@ impl GitStore { ) -> Task>> { let buffer_id = buffer.read(cx).remote_id(); - if let Some(diff_state) = self.diffs.get(&buffer_id) { - if let Some(uncommitted_diff) = diff_state + if let Some(diff_state) = self.diffs.get(&buffer_id) + && let Some(uncommitted_diff) = diff_state .read(cx) .uncommitted_diff .as_ref() .and_then(|weak| weak.upgrade()) + { + if let Some(task) = + diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation()) { - if let Some(task) = - diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation()) - { - return cx.background_executor().spawn(async move { - task.await; - Ok(uncommitted_diff) - }); - } - return Task::ready(Ok(uncommitted_diff)); + return cx.background_executor().spawn(async move { + task.await; + Ok(uncommitted_diff) + }); } + return Task::ready(Ok(uncommitted_diff)); } let Some((repo, repo_path)) = @@ -762,29 +762,26 @@ impl GitStore { log::debug!("open conflict set"); let buffer_id = buffer.read(cx).remote_id(); - if let Some(git_state) = self.diffs.get(&buffer_id) { - if let Some(conflict_set) = git_state + if let Some(git_state) = self.diffs.get(&buffer_id) + && let Some(conflict_set) = git_state .read(cx) .conflict_set .as_ref() .and_then(|weak| weak.upgrade()) - { - let conflict_set = conflict_set.clone(); - let buffer_snapshot = buffer.read(cx).text_snapshot(); + { + let conflict_set = conflict_set; + let buffer_snapshot = buffer.read(cx).text_snapshot(); - git_state.update(cx, |state, cx| { - let _ = state.reparse_conflict_markers(buffer_snapshot, cx); - }); + git_state.update(cx, |state, cx| { + let _ = state.reparse_conflict_markers(buffer_snapshot, cx); + }); - return conflict_set; - } + return conflict_set; } let is_unmerged = self .repository_and_path_for_buffer_id(buffer_id, cx) - .map_or(false, |(repo, path)| { - repo.read(cx).snapshot.has_conflict(&path) - }); + .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path)); let git_store = cx.weak_entity(); let buffer_git_state = self .diffs @@ -915,7 +912,7 @@ impl GitStore { return Task::ready(Err(anyhow!("failed to find a git repository for buffer"))); }; let content = match &version { - Some(version) => buffer.rope_for_version(version).clone(), + Some(version) => buffer.rope_for_version(version), None => buffer.as_rope().clone(), }; let version = version.unwrap_or(buffer.version()); @@ -1149,29 +1146,26 @@ impl GitStore { for (buffer_id, diff) in self.diffs.iter() { if let Some((buffer_repo, repo_path)) = self.repository_and_path_for_buffer_id(*buffer_id, cx) + && buffer_repo == repo { - if buffer_repo == repo { - diff.update(cx, |diff, cx| { - if let Some(conflict_set) = &diff.conflict_set { - let conflict_status_changed = - conflict_set.update(cx, |conflict_set, cx| { - let has_conflict = repo_snapshot.has_conflict(&repo_path); - conflict_set.set_has_conflict(has_conflict, cx) - })?; - if conflict_status_changed { - let buffer_store = 
self.buffer_store.read(cx); - if let Some(buffer) = buffer_store.get(*buffer_id) { - let _ = diff.reparse_conflict_markers( - buffer.read(cx).text_snapshot(), - cx, - ); - } + diff.update(cx, |diff, cx| { + if let Some(conflict_set) = &diff.conflict_set { + let conflict_status_changed = + conflict_set.update(cx, |conflict_set, cx| { + let has_conflict = repo_snapshot.has_conflict(&repo_path); + conflict_set.set_has_conflict(has_conflict, cx) + })?; + if conflict_status_changed { + let buffer_store = self.buffer_store.read(cx); + if let Some(buffer) = buffer_store.get(*buffer_id) { + let _ = diff + .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx); } } - anyhow::Ok(()) - }) - .ok(); - } + } + anyhow::Ok(()) + }) + .ok(); } } cx.emit(GitStoreEvent::RepositoryUpdated( @@ -1275,7 +1269,7 @@ impl GitStore { ) { match event { BufferStoreEvent::BufferAdded(buffer) => { - cx.subscribe(&buffer, |this, buffer, event, cx| { + cx.subscribe(buffer, |this, buffer, event, cx| { if let BufferEvent::LanguageChanged = event { let buffer_id = buffer.read(cx).remote_id(); if let Some(diff_state) = this.diffs.get(&buffer_id) { @@ -1293,7 +1287,7 @@ impl GitStore { } } BufferStoreEvent::BufferDropped(buffer_id) => { - self.diffs.remove(&buffer_id); + self.diffs.remove(buffer_id); for diffs in self.shared_diffs.values_mut() { diffs.remove(buffer_id); } @@ -1382,8 +1376,8 @@ impl GitStore { repository.update(cx, |repository, cx| { let repo_abs_path = &repository.work_directory_abs_path; if changed_repos.iter().any(|update| { - update.old_work_directory_abs_path.as_ref() == Some(&repo_abs_path) - || update.new_work_directory_abs_path.as_ref() == Some(&repo_abs_path) + update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path) + || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path) }) { repository.reload_buffer_diff_bases(cx); } @@ -1464,6 +1458,45 @@ impl GitStore { } } + pub fn git_clone( + &self, + repo: String, + path: impl Into>, + cx: &App, + ) -> Task> { + let path = path.into(); + match &self.state { + GitStoreState::Local { fs, .. } => { + let fs = fs.clone(); + cx.background_executor() + .spawn(async move { fs.git_clone(&repo, &path).await }) + } + GitStoreState::Ssh { + upstream_client, + upstream_project_id, + .. + } => { + let request = upstream_client.request(proto::GitClone { + project_id: upstream_project_id.0, + abs_path: path.to_string_lossy().to_string(), + remote_repo: repo, + }); + + cx.background_spawn(async move { + let result = request.await?; + + match result.success { + true => Ok(()), + false => Err(anyhow!("Git Clone failed")), + } + }) + } + GitStoreState::Remote { .. } => { + Task::ready(Err(anyhow!("Git Clone isn't supported for remote users"))) + } + } + } + async fn handle_update_repository( this: Entity, envelope: TypedEnvelope, @@ -1473,10 +1506,7 @@ impl GitStore { let mut update = envelope.payload; let id = RepositoryId::from_proto(update.id); - let client = this - .upstream_client() - .context("no upstream client")? 
- .clone(); + let client = this.upstream_client().context("no upstream client")?; let mut is_new = false; let repo = this.repositories.entry(id).or_insert_with(|| { @@ -1495,7 +1525,7 @@ impl GitStore { }); if is_new { this._subscriptions - .push(cx.subscribe(&repo, Self::on_repository_event)) + .push(cx.subscribe(repo, Self::on_repository_event)) } repo.update(cx, { @@ -1550,6 +1580,22 @@ impl GitStore { Ok(proto::Ack {}) } + async fn handle_git_clone( + this: Entity, + envelope: TypedEnvelope, + cx: AsyncApp, + ) -> Result { + let path: Arc = PathBuf::from(envelope.payload.abs_path).into(); + let repo_name = envelope.payload.remote_repo; + let result = cx + .update(|cx| this.read(cx).git_clone(repo_name, path, cx))? + .await; + + Ok(proto::GitCloneResponse { + success: result.is_ok(), + }) + } + async fn handle_fetch( this: Entity, envelope: TypedEnvelope, @@ -1838,6 +1884,23 @@ impl GitStore { .collect::>(), }) } + async fn handle_get_default_branch( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; + + let branch = repository_handle + .update(&mut cx, |repository_handle, _| { + repository_handle.default_branch() + })? + .await?? + .map(Into::into); + + Ok(proto::GetDefaultBranchResponse { branch }) + } async fn handle_create_branch( this: Entity, envelope: TypedEnvelope, @@ -2157,13 +2220,13 @@ impl GitStore { ) -> Result<()> { let buffer_id = BufferId::new(request.payload.buffer_id)?; this.update(&mut cx, |this, cx| { - if let Some(diff_state) = this.diffs.get_mut(&buffer_id) { - if let Some(buffer) = this.buffer_store.read(cx).get(buffer_id) { - let buffer = buffer.read(cx).text_snapshot(); - diff_state.update(cx, |diff_state, cx| { - diff_state.handle_base_texts_updated(buffer, request.payload, cx); - }) - } + if let Some(diff_state) = this.diffs.get_mut(&buffer_id) + && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id) + { + let buffer = buffer.read(cx).text_snapshot(); + diff_state.update(cx, |diff_state, cx| { + diff_state.handle_base_texts_updated(buffer, request.payload, cx); + }) } }) } @@ -2275,16 +2338,20 @@ impl GitStore { return None; }; - let mut paths = vec![]; + let mut paths = Vec::new(); // All paths prefixed by a given repo will constitute a continuous range. 
while let Some(path) = entries.get(ix) && let Some(repo_path) = - RepositorySnapshot::abs_path_to_repo_path_inner(&repo_path, &path) + RepositorySnapshot::abs_path_to_repo_path_inner(&repo_path, path) { paths.push((repo_path, ix)); ix += 1; } - Some((repo, paths)) + if paths.is_empty() { + None + } else { + Some((repo, paths)) + } }); tasks.push_back(task); } @@ -2429,14 +2496,14 @@ impl BufferGitState { pub fn wait_for_recalculation(&mut self) -> Option + use<>> { if *self.recalculating_tx.borrow() { let mut rx = self.recalculating_tx.subscribe(); - return Some(async move { + Some(async move { loop { let is_recalculating = rx.recv().await; if is_recalculating != Some(true) { break; } } - }); + }) } else { None } @@ -2696,6 +2763,7 @@ impl RepositorySnapshot { .iter() .map(|repo_path| repo_path.to_proto()) .collect(), + merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()), project_id, id: self.id.to_proto(), abs_path: self.work_directory_abs_path.to_proto(), @@ -2758,6 +2826,7 @@ impl RepositorySnapshot { .iter() .map(|path| path.as_ref().to_proto()) .collect(), + merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()), project_id, id: self.id.to_proto(), abs_path: self.work_directory_abs_path.to_proto(), @@ -2797,15 +2866,15 @@ impl RepositorySnapshot { } pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool { - self.merge.conflicted_paths.contains(&repo_path) + self.merge.conflicted_paths.contains(repo_path) } pub fn has_conflict(&self, repo_path: &RepoPath) -> bool { let had_conflict_on_last_merge_head_change = - self.merge.conflicted_paths.contains(&repo_path); + self.merge.conflicted_paths.contains(repo_path); let has_conflict_currently = self - .status_for_path(&repo_path) - .map_or(false, |entry| entry.status.is_conflicted()); + .status_for_path(repo_path) + .is_some_and(|entry| entry.status.is_conflicted()); had_conflict_on_last_merge_head_change || has_conflict_currently } @@ -3346,7 +3415,6 @@ impl Repository { reset_mode: ResetMode, _cx: &mut App, ) -> oneshot::Receiver> { - let commit = commit.to_string(); let id = self.id; self.send_job(None, move |git_repo, _| async move { @@ -3453,14 +3521,13 @@ impl Repository { let Some(project_path) = self.repo_path_to_project_path(path, cx) else { continue; }; - if let Some(buffer) = buffer_store.get_by_path(&project_path) { - if buffer + if let Some(buffer) = buffer_store.get_by_path(&project_path) + && buffer .read(cx) .file() - .map_or(false, |file| file.disk_state().exists()) - { - save_futures.push(buffer_store.save_buffer(buffer, cx)); - } + .is_some_and(|file| file.disk_state().exists()) + { + save_futures.push(buffer_store.save_buffer(buffer, cx)); } } }) @@ -3520,14 +3587,13 @@ impl Repository { let Some(project_path) = self.repo_path_to_project_path(path, cx) else { continue; }; - if let Some(buffer) = buffer_store.get_by_path(&project_path) { - if buffer + if let Some(buffer) = buffer_store.get_by_path(&project_path) + && buffer .read(cx) .file() - .map_or(false, |file| file.disk_state().exists()) - { - save_futures.push(buffer_store.save_buffer(buffer, cx)); - } + .is_some_and(|file| file.disk_state().exists()) + { + save_futures.push(buffer_store.save_buffer(buffer, cx)); } } }) @@ -3574,7 +3640,7 @@ impl Repository { let to_stage = self .cached_status() .filter(|entry| !entry.status.staging().is_fully_staged()) - .map(|entry| entry.repo_path.clone()) + .map(|entry| entry.repo_path) .collect(); self.stage_entries(to_stage, cx) } @@ -3583,16 +3649,13 @@ impl 
Repository { let to_unstage = self .cached_status() .filter(|entry| entry.status.staging().has_staged()) - .map(|entry| entry.repo_path.clone()) + .map(|entry| entry.repo_path) .collect(); self.unstage_entries(to_unstage, cx) } pub fn stash_all(&mut self, cx: &mut Context) -> Task> { - let to_stash = self - .cached_status() - .map(|entry| entry.repo_path.clone()) - .collect(); + let to_stash = self.cached_status().map(|entry| entry.repo_path).collect(); self.stash_entries(to_stash, cx) } @@ -4188,6 +4251,7 @@ impl Repository { .map(proto_to_commit_details); self.snapshot.merge.conflicted_paths = conflicted_paths; + self.snapshot.merge.message = update.merge_message.map(SharedString::from); let edits = update .removed_statuses @@ -4264,7 +4328,8 @@ impl Repository { bail!("not a local repository") }; let (snapshot, events) = this - .read_with(&mut cx, |this, _| { + .update(&mut cx, |this, _| { + this.paths_needing_status_update.clear(); compute_snapshot( this.id, this.work_directory_abs_path.clone(), @@ -4339,14 +4404,13 @@ impl Repository { } if let Some(job) = jobs.pop_front() { - if let Some(current_key) = &job.key { - if jobs + if let Some(current_key) = &job.key + && jobs .iter() .any(|other_job| other_job.key.as_ref() == Some(current_key)) { continue; } - } (job.job)(state.clone(), cx).await; } else if let Some(job) = job_rx.next().await { jobs.push_back(job); @@ -4377,13 +4441,12 @@ impl Repository { } if let Some(job) = jobs.pop_front() { - if let Some(current_key) = &job.key { - if jobs + if let Some(current_key) = &job.key + && jobs .iter() .any(|other_job| other_job.key.as_ref() == Some(current_key)) - { - continue; - } + { + continue; } (job.job)(state.clone(), cx).await; } else if let Some(job) = job_rx.next().await { @@ -4494,6 +4557,9 @@ impl Repository { }; let paths = changed_paths.iter().cloned().collect::>(); + if paths.is_empty() { + return Ok(()); + } let statuses = backend.status(&paths).await?; let changed_path_statuses = cx @@ -4504,10 +4570,10 @@ impl Repository { for (repo_path, status) in &*statuses.entries { changed_paths.remove(repo_path); - if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left) { - if cursor.item().is_some_and(|entry| entry.status == *status) { - continue; - } + if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left) + && cursor.item().is_some_and(|entry| entry.status == *status) + { + continue; } changed_path_statuses.push(Edit::Insert(StatusEntry { diff --git a/crates/project/src/git_store/conflict_set.rs b/crates/project/src/git_store/conflict_set.rs index 27b191f65f896e6488a4d9c52f37e9426cac1c46..313a1e90adc2fde8a62dbe6aa60b4d3a366af22c 100644 --- a/crates/project/src/git_store/conflict_set.rs +++ b/crates/project/src/git_store/conflict_set.rs @@ -369,7 +369,7 @@ mod tests { .unindent(); let buffer_id = BufferId::new(1).unwrap(); - let buffer = Buffer::new(0, buffer_id, test_content.to_string()); + let buffer = Buffer::new(0, buffer_id, test_content); let snapshot = buffer.snapshot(); let conflict_snapshot = ConflictSet::parse(&snapshot); @@ -400,7 +400,7 @@ mod tests { >>>>>>> "# .unindent(); let buffer_id = BufferId::new(1).unwrap(); - let buffer = Buffer::new(0, buffer_id, test_content.to_string()); + let buffer = Buffer::new(0, buffer_id, test_content); let snapshot = buffer.snapshot(); let conflict_snapshot = ConflictSet::parse(&snapshot); @@ -653,7 +653,7 @@ mod tests { cx.run_until_parked(); conflict_set.update(cx, |conflict_set, _| { - assert_eq!(conflict_set.has_conflict, false); + 
assert!(!conflict_set.has_conflict); assert_eq!(conflict_set.snapshot.conflicts.len(), 0); }); diff --git a/crates/project/src/git_store/git_traversal.rs b/crates/project/src/git_store/git_traversal.rs index bbcffe046debd8ab4529cf2b661abbebefd13f47..eee492e482daf746c60836cab172f84b2834b468 100644 --- a/crates/project/src/git_store/git_traversal.rs +++ b/crates/project/src/git_store/git_traversal.rs @@ -42,8 +42,8 @@ impl<'a> GitTraversal<'a> { // other_repo/ // .git/ // our_query.txt - let mut query = path.ancestors(); - while let Some(query) = query.next() { + let query = path.ancestors(); + for query in query { let (_, snapshot) = self .repo_root_to_snapshot .range(Path::new("")..=query) @@ -182,11 +182,11 @@ impl<'a> Iterator for ChildEntriesGitIter<'a> { type Item = GitEntryRef<'a>; fn next(&mut self) -> Option { - if let Some(item) = self.traversal.entry() { - if item.path.starts_with(self.parent_path) { - self.traversal.advance_to_sibling(); - return Some(item); - } + if let Some(item) = self.traversal.entry() + && item.path.starts_with(self.parent_path) + { + self.traversal.advance_to_sibling(); + return Some(item); } None } @@ -199,7 +199,7 @@ pub struct GitEntryRef<'a> { } impl GitEntryRef<'_> { - pub fn to_owned(&self) -> GitEntry { + pub fn to_owned(self) -> GitEntry { GitEntry { entry: self.entry.clone(), git_summary: self.git_summary, @@ -211,7 +211,7 @@ impl Deref for GitEntryRef<'_> { type Target = Entry; fn deref(&self) -> &Self::Target { - &self.entry + self.entry } } diff --git a/crates/project/src/image_store.rs b/crates/project/src/image_store.rs index 79f134b91a36a2f7d1f3f256506931b47ae8cf9c..e499d4e026f724f12e023738f12afb2735f9ce2d 100644 --- a/crates/project/src/image_store.rs +++ b/crates/project/src/image_store.rs @@ -224,7 +224,7 @@ impl ProjectItem for ImageItem { path: &ProjectPath, cx: &mut App, ) -> Option>>> { - if is_image_file(&project, &path, cx) { + if is_image_file(project, path, cx) { Some(cx.spawn({ let path = path.clone(); let project = project.clone(); @@ -244,7 +244,7 @@ impl ProjectItem for ImageItem { } fn project_path(&self, cx: &App) -> Option { - Some(self.project_path(cx).clone()) + Some(self.project_path(cx)) } fn is_dirty(&self) -> bool { @@ -375,7 +375,6 @@ impl ImageStore { let (mut tx, rx) = postage::watch::channel(); entry.insert(rx.clone()); - let project_path = project_path.clone(); let load_image = self .state .open_image(project_path.path.clone(), worktree, cx); @@ -446,15 +445,12 @@ impl ImageStore { event: &ImageItemEvent, cx: &mut Context, ) { - match event { - ImageItemEvent::FileHandleChanged => { - if let Some(local) = self.state.as_local() { - local.update(cx, |local, cx| { - local.image_changed_file(image, cx); - }) - } - } - _ => {} + if let ImageItemEvent::FileHandleChanged = event + && let Some(local) = self.state.as_local() + { + local.update(cx, |local, cx| { + local.image_changed_file(image, cx); + }) } } } @@ -531,13 +527,10 @@ impl ImageStoreImpl for Entity { impl LocalImageStore { fn subscribe_to_worktree(&mut self, worktree: &Entity, cx: &mut Context) { cx.subscribe(worktree, |this, worktree, event, cx| { - if worktree.read(cx).is_local() { - match event { - worktree::Event::UpdatedEntries(changes) => { - this.local_worktree_entries_changed(&worktree, changes, cx); - } - _ => {} - } + if worktree.read(cx).is_local() + && let worktree::Event::UpdatedEntries(changes) = event + { + this.local_worktree_entries_changed(&worktree, changes, cx); } }) .detach(); diff --git a/crates/project/src/lsp_command.rs 
b/crates/project/src/lsp_command.rs index c458b6b300c34ec03d144cf297277faf4a94f5db..ce7a871d1a63107ab4908dccea68dd41d73a319f 100644 --- a/crates/project/src/lsp_command.rs +++ b/crates/project/src/lsp_command.rs @@ -332,9 +332,9 @@ impl LspCommand for PrepareRename { _: Entity, buffer: Entity, _: LanguageServerId, - mut cx: AsyncApp, + cx: AsyncApp, ) -> Result { - buffer.read_with(&mut cx, |buffer, _| match message { + buffer.read_with(&cx, |buffer, _| match message { Some(lsp::PrepareRenameResponse::Range(range)) | Some(lsp::PrepareRenameResponse::RangeWithPlaceholder { range, .. }) => { let Range { start, end } = range_from_lsp(range); @@ -386,7 +386,7 @@ impl LspCommand for PrepareRename { .await?; Ok(Self { - position: buffer.read_with(&mut cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, }) } @@ -500,13 +500,12 @@ impl LspCommand for PerformRename { mut cx: AsyncApp, ) -> Result { if let Some(edit) = message { - let (lsp_adapter, lsp_server) = + let (_, lsp_server) = language_server_for_buffer(&lsp_store, &buffer, server_id, &mut cx)?; LocalLspStore::deserialize_workspace_edit( lsp_store, edit, self.push_to_history, - lsp_adapter, lsp_server, &mut cx, ) @@ -544,7 +543,7 @@ impl LspCommand for PerformRename { })? .await?; Ok(Self { - position: buffer.read_with(&mut cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, new_name: message.new_name, push_to_history: false, }) @@ -659,7 +658,7 @@ impl LspCommand for GetDefinitions { })? .await?; Ok(Self { - position: buffer.read_with(&mut cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, }) } @@ -762,7 +761,7 @@ impl LspCommand for GetDeclarations { })? .await?; Ok(Self { - position: buffer.read_with(&mut cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, }) } @@ -864,7 +863,7 @@ impl LspCommand for GetImplementations { })? .await?; Ok(Self { - position: buffer.read_with(&mut cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, }) } @@ -963,7 +962,7 @@ impl LspCommand for GetTypeDefinitions { })? .await?; Ok(Self { - position: buffer.read_with(&mut cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, }) } @@ -1116,18 +1115,12 @@ pub async fn location_links_from_lsp( } } - let (lsp_adapter, language_server) = - language_server_for_buffer(&lsp_store, &buffer, server_id, &mut cx)?; + let (_, language_server) = language_server_for_buffer(&lsp_store, &buffer, server_id, &mut cx)?; let mut definitions = Vec::new(); for (origin_range, target_uri, target_range) in unresolved_links { let target_buffer_handle = lsp_store .update(&mut cx, |this, cx| { - this.open_local_buffer_via_lsp( - target_uri, - language_server.server_id(), - lsp_adapter.name.clone(), - cx, - ) + this.open_local_buffer_via_lsp(target_uri, language_server.server_id(), cx) })? 
.await?; @@ -1172,8 +1165,7 @@ pub async fn location_link_from_lsp( server_id: LanguageServerId, cx: &mut AsyncApp, ) -> Result { - let (lsp_adapter, language_server) = - language_server_for_buffer(&lsp_store, &buffer, server_id, cx)?; + let (_, language_server) = language_server_for_buffer(lsp_store, buffer, server_id, cx)?; let (origin_range, target_uri, target_range) = ( link.origin_selection_range, @@ -1183,12 +1175,7 @@ pub async fn location_link_from_lsp( let target_buffer_handle = lsp_store .update(cx, |lsp_store, cx| { - lsp_store.open_local_buffer_via_lsp( - target_uri, - language_server.server_id(), - lsp_adapter.name.clone(), - cx, - ) + lsp_store.open_local_buffer_via_lsp(target_uri, language_server.server_id(), cx) })? .await?; @@ -1326,7 +1313,7 @@ impl LspCommand for GetReferences { mut cx: AsyncApp, ) -> Result> { let mut references = Vec::new(); - let (lsp_adapter, language_server) = + let (_, language_server) = language_server_for_buffer(&lsp_store, &buffer, server_id, &mut cx)?; if let Some(locations) = locations { @@ -1336,7 +1323,6 @@ impl LspCommand for GetReferences { lsp_store.open_local_buffer_via_lsp( lsp_location.uri, language_server.server_id(), - lsp_adapter.name.clone(), cx, ) })? @@ -1344,7 +1330,7 @@ impl LspCommand for GetReferences { target_buffer_handle .clone() - .read_with(&mut cx, |target_buffer, _| { + .read_with(&cx, |target_buffer, _| { let target_start = target_buffer .clip_point_utf16(point_from_lsp(lsp_location.range.start), Bias::Left); let target_end = target_buffer @@ -1388,7 +1374,7 @@ impl LspCommand for GetReferences { })? .await?; Ok(Self { - position: buffer.read_with(&mut cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, }) } @@ -1498,9 +1484,9 @@ impl LspCommand for GetDocumentHighlights { _: Entity, buffer: Entity, _: LanguageServerId, - mut cx: AsyncApp, + cx: AsyncApp, ) -> Result> { - buffer.read_with(&mut cx, |buffer, _| { + buffer.read_with(&cx, |buffer, _| { let mut lsp_highlights = lsp_highlights.unwrap_or_default(); lsp_highlights.sort_unstable_by_key(|h| (h.range.start, Reverse(h.range.end))); lsp_highlights @@ -1548,7 +1534,7 @@ impl LspCommand for GetDocumentHighlights { })? .await?; Ok(Self { - position: buffer.read_with(&mut cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, }) } @@ -1879,7 +1865,7 @@ impl LspCommand for GetSignatureHelp { })? .await .with_context(|| format!("waiting for version for buffer {}", buffer.entity_id()))?; - let buffer_snapshot = buffer.read_with(&mut cx, |buffer, _| buffer.snapshot())?; + let buffer_snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?; Ok(Self { position: payload .position @@ -1961,13 +1947,13 @@ impl LspCommand for GetHover { _: Entity, buffer: Entity, _: LanguageServerId, - mut cx: AsyncApp, + cx: AsyncApp, ) -> Result { let Some(hover) = message else { return Ok(None); }; - let (language, range) = buffer.read_with(&mut cx, |buffer, _| { + let (language, range) = buffer.read_with(&cx, |buffer, _| { ( buffer.language().cloned(), hover.range.map(|range| { @@ -2053,7 +2039,7 @@ impl LspCommand for GetHover { })? 
.await?; Ok(Self { - position: buffer.read_with(&mut cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, }) } @@ -2127,7 +2113,7 @@ impl LspCommand for GetHover { return Ok(None); } - let language = buffer.read_with(&mut cx, |buffer, _| buffer.language().cloned())?; + let language = buffer.read_with(&cx, |buffer, _| buffer.language().cloned())?; let range = if let (Some(start), Some(end)) = (message.start, message.end) { language::proto::deserialize_anchor(start) .and_then(|start| language::proto::deserialize_anchor(end).map(|end| start..end)) @@ -2222,7 +2208,7 @@ impl LspCommand for GetCompletions { let unfiltered_completions_count = completions.len(); let language_server_adapter = lsp_store - .read_with(&mut cx, |lsp_store, _| { + .read_with(&cx, |lsp_store, _| { lsp_store.language_server_adapter_for_id(server_id) })? .with_context(|| format!("no language server with id {server_id}"))?; @@ -2355,15 +2341,14 @@ impl LspCommand for GetCompletions { .zip(completion_edits) .map(|(mut lsp_completion, mut edit)| { LineEnding::normalize(&mut edit.new_text); - if lsp_completion.data.is_none() { - if let Some(default_data) = lsp_defaults + if lsp_completion.data.is_none() + && let Some(default_data) = lsp_defaults .as_ref() .and_then(|item_defaults| item_defaults.data.clone()) - { - // Servers (e.g. JDTLS) prefer unchanged completions, when resolving the items later, - // so we do not insert the defaults here, but `data` is needed for resolving, so this is an exception. - lsp_completion.data = Some(default_data); - } + { + // Servers (e.g. JDTLS) prefer unchanged completions, when resolving the items later, + // so we do not insert the defaults here, but `data` is needed for resolving, so this is an exception. 
+ lsp_completion.data = Some(default_data); } CoreCompletion { replace_range: edit.replace_range, @@ -2409,7 +2394,7 @@ impl LspCommand for GetCompletions { .position .and_then(language::proto::deserialize_anchor) .map(|p| { - buffer.read_with(&mut cx, |buffer, _| { + buffer.read_with(&cx, |buffer, _| { buffer.clip_point_utf16(Unclipped(p.to_point_utf16(buffer)), Bias::Left) }) }) @@ -2516,8 +2501,8 @@ pub(crate) fn parse_completion_text_edit( }; Some(ParsedCompletionEdit { - insert_range: insert_range, - replace_range: replace_range, + insert_range, + replace_range, new_text: new_text.clone(), }) } @@ -2610,11 +2595,9 @@ impl LspCommand for GetCodeActions { server_id: LanguageServerId, cx: AsyncApp, ) -> Result> { - let requested_kinds_set = if let Some(kinds) = self.kinds { - Some(kinds.into_iter().collect::>()) - } else { - None - }; + let requested_kinds_set = self + .kinds + .map(|kinds| kinds.into_iter().collect::>()); let language_server = cx.update(|cx| { lsp_store @@ -2637,10 +2620,10 @@ impl LspCommand for GetCodeActions { .filter_map(|entry| { let (lsp_action, resolved) = match entry { lsp::CodeActionOrCommand::CodeAction(lsp_action) => { - if let Some(command) = lsp_action.command.as_ref() { - if !available_commands.contains(&command.command) { - return None; - } + if let Some(command) = lsp_action.command.as_ref() + && !available_commands.contains(&command.command) + { + return None; } (LspAction::Action(Box::new(lsp_action)), false) } @@ -2655,10 +2638,9 @@ impl LspCommand for GetCodeActions { if let Some((requested_kinds, kind)) = requested_kinds_set.as_ref().zip(lsp_action.action_kind()) + && !requested_kinds.contains(&kind) { - if !requested_kinds.contains(&kind) { - return None; - } + return None; } Some(CodeAction { @@ -2755,7 +2737,7 @@ impl GetCodeActions { Some(lsp::CodeActionProviderCapability::Options(CodeActionOptions { code_action_kinds: Some(supported_action_kinds), .. 
- })) => Some(supported_action_kinds.clone()), + })) => Some(supported_action_kinds), _ => capabilities.code_action_kinds, } } @@ -2878,7 +2860,7 @@ impl LspCommand for OnTypeFormatting { })?; Ok(Self { - position: buffer.read_with(&mut cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, trigger: message.trigger.clone(), options, push_to_history: false, @@ -3462,10 +3444,7 @@ impl LspCommand for GetCodeLens { capabilities .server_capabilities .code_lens_provider - .as_ref() - .map_or(false, |code_lens_options| { - code_lens_options.resolve_provider.unwrap_or(false) - }) + .is_some() } fn to_lsp( @@ -3490,9 +3469,9 @@ impl LspCommand for GetCodeLens { lsp_store: Entity<LspStore>, buffer: Entity<Buffer>, server_id: LanguageServerId, - mut cx: AsyncApp, + cx: AsyncApp, ) -> anyhow::Result> { - let snapshot = buffer.read_with(&mut cx, |buffer, _| buffer.snapshot())?; + let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?; let language_server = cx.update(|cx| { lsp_store .read(cx) @@ -3811,7 +3790,7 @@ impl GetDocumentDiagnostics { }, uri: lsp::Url::parse(&info.location_url.unwrap()).unwrap(), }, - message: info.message.clone(), + message: info.message, } }) .collect::<Vec<_>>(); @@ -3839,12 +3818,11 @@ impl GetDocumentDiagnostics { _ => None, }, code, - code_description: match diagnostic.code_description { - Some(code_description) => Some(CodeDescription { + code_description: diagnostic + .code_description + .map(|code_description| CodeDescription { href: Some(lsp::Url::parse(&code_description).unwrap()), }), - None => None, - }, related_information: Some(related_information), tags: Some(tags), source: diagnostic.source.clone(), @@ -4509,9 +4487,8 @@ mod tests { data: Some(json!({"detail": "test detail"})), }; - let proto_diagnostic = - GetDocumentDiagnostics::serialize_lsp_diagnostic(lsp_diagnostic.clone()) - .expect("Failed to serialize diagnostic"); + let proto_diagnostic = GetDocumentDiagnostics::serialize_lsp_diagnostic(lsp_diagnostic) + .expect("Failed to serialize diagnostic"); let start = proto_diagnostic.start.unwrap(); let end = proto_diagnostic.end.unwrap(); diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index b88cf42ff51639f159333e2a81f5cd768bb7ff46..709bd10358261c79ad4c54a2526495ab899e9b29 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -1,3 +1,14 @@ +//! LSP store provides unified access to the language server protocol. +//! The consumers of the LSP store can interact with language servers without knowing exactly which language server they're interacting with. +//! +//! # Local/Remote LSP Stores +//! This module is split up into three distinct parts: +//! - [`LocalLspStore`], which runs on the host machine (either the project host or the SSH host) and manages the lifecycle of language servers. +//! - [`RemoteLspStore`], which runs on remote machines (project guests) and mostly passes requests through via RPC. +//! The remote stores don't really care about which language server they're running against; they don't usually get to decide which language server is going to be responsible for handling their request. +//! - [`LspStore`], which unifies the two under one consistent interface for interacting with language servers. +//! +//! Most of the interesting work happens at the local layer, as the bulk of the complexity lies in managing the lifecycle of language servers.
The actual implementation of the LSP protocol is handled by [`lsp`] crate. pub mod clangd_ext; pub mod json_language_server_ext; pub mod lsp_ext_command; @@ -6,20 +17,20 @@ pub mod rust_analyzer_ext; use crate::{ CodeAction, ColorPresentation, Completion, CompletionResponse, CompletionSource, CoreCompletion, DocumentColor, Hover, InlayHint, LocationLink, LspAction, LspPullDiagnostics, - ProjectItem, ProjectPath, ProjectTransaction, PulledDiagnostics, ResolveState, Symbol, - ToolchainStore, + ManifestProvidersStore, ProjectItem, ProjectPath, ProjectTransaction, PulledDiagnostics, + ResolveState, Symbol, buffer_store::{BufferStore, BufferStoreEvent}, environment::ProjectEnvironment, lsp_command::{self, *}, lsp_store, manifest_tree::{ - AdapterQuery, LanguageServerTree, LanguageServerTreeNode, LaunchDisposition, - ManifestQueryDelegate, ManifestTree, + LanguageServerTree, LanguageServerTreeNode, LaunchDisposition, ManifestQueryDelegate, + ManifestTree, }, prettier_store::{self, PrettierStore, PrettierStoreEvent}, project_settings::{LspSettings, ProjectSettings}, relativize_path, resolve_path, - toolchain_store::{EmptyToolchainStore, ToolchainStoreEvent}, + toolchain_store::{LocalToolchainStore, ToolchainStoreEvent}, worktree_store::{WorktreeStore, WorktreeStoreEvent}, yarn::YarnPathStore, }; @@ -44,9 +55,9 @@ use itertools::Itertools as _; use language::{ Bias, BinaryStatus, Buffer, BufferSnapshot, CachedLspAdapter, CodeLabel, Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, Diff, File as _, Language, LanguageName, - LanguageRegistry, LanguageToolchainStore, LocalFile, LspAdapter, LspAdapterDelegate, Patch, - PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, Unclipped, - WorkspaceFoldersContent, + LanguageRegistry, LocalFile, LspAdapter, LspAdapterDelegate, ManifestDelegate, ManifestName, + Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Toolchain, Transaction, + Unclipped, language_settings::{ FormatOnSave, Formatter, LanguageSettings, SelectedFormatter, language_settings, }, @@ -61,9 +72,10 @@ use lsp::{ AdapterServerCapabilities, CodeActionKind, CompletionContext, DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions, Edit, FileOperationFilter, FileOperationPatternKind, FileOperationRegistrationOptions, FileRename, FileSystemWatcher, - LanguageServer, LanguageServerBinary, LanguageServerBinaryOptions, LanguageServerId, - LanguageServerName, LanguageServerSelector, LspRequestFuture, MessageActionItem, MessageType, - OneOf, RenameFilesParams, SymbolKind, TextEdit, WillRenameFiles, WorkDoneProgressCancelParams, + LSP_REQUEST_TIMEOUT, LanguageServer, LanguageServerBinary, LanguageServerBinaryOptions, + LanguageServerId, LanguageServerName, LanguageServerSelector, LspRequestFuture, + MessageActionItem, MessageType, OneOf, RenameFilesParams, SymbolKind, + TextDocumentSyncSaveOptions, TextEdit, WillRenameFiles, WorkDoneProgressCancelParams, WorkspaceFolder, notification::DidRenameFiles, }; use node_runtime::read_package_installed_version; @@ -73,7 +85,7 @@ use rand::prelude::*; use rpc::{ AnyProtoClient, - proto::{FromProto, ToProto}, + proto::{FromProto, LspRequestId, LspRequestMessage as _, ToProto}, }; use serde::Serialize; use settings::{Settings, SettingsLocation, SettingsStore}; @@ -81,7 +93,7 @@ use sha2::{Digest, Sha256}; use smol::channel::Sender; use snippet::Snippet; use std::{ - any::Any, + any::{Any, TypeId}, borrow::Cow, cell::RefCell, cmp::{Ordering, Reverse}, @@ -140,6 +152,20 @@ impl FormatTrigger { } } 
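The new module docs above describe the split between `LocalLspStore`, `RemoteLspStore`, and the unifying `LspStore`. As a rough illustration of that shape only (a minimal, self-contained sketch, not Zed's actual types; `Store`, `LocalStore`, `RemoteStore`, `HoverRequest`, and the other names below are hypothetical stand-ins), the unified front end can be pictured as a type that dispatches the same request either to a local handler that talks to language servers directly or to a remote handler that forwards it over RPC:

```rust
// Minimal sketch of the local/remote split described in the lsp_store module docs.
// All of these types are hypothetical stand-ins, not Zed's real ones.

#[derive(Debug, Clone)]
struct HoverRequest {
    buffer_id: u64,
    offset: usize,
}

#[derive(Debug)]
struct HoverResult {
    contents: String,
}

/// Stand-in for `LocalLspStore`: owns the language servers and answers directly.
struct LocalStore;

impl LocalStore {
    fn hover(&self, req: &HoverRequest) -> HoverResult {
        // The real store would pick a running language server for the buffer
        // and issue an LSP `textDocument/hover` request here.
        HoverResult {
            contents: format!("hover for buffer {} at offset {}", req.buffer_id, req.offset),
        }
    }
}

/// Stand-in for `RemoteLspStore`: knows nothing about servers, only forwards over RPC.
struct RemoteStore {
    project_id: u64,
}

impl RemoteStore {
    fn hover(&self, req: &HoverRequest) -> HoverResult {
        // The real store would serialize the request into a proto message and
        // send it to the host, whose local store resolves it.
        HoverResult {
            contents: format!(
                "hover for buffer {} (forwarded via project {})",
                req.buffer_id, self.project_id
            ),
        }
    }
}

/// Stand-in for the unified `LspStore`: callers never branch on locality themselves.
enum Store {
    Local(LocalStore),
    Remote(RemoteStore),
}

impl Store {
    fn hover(&self, req: &HoverRequest) -> HoverResult {
        match self {
            Store::Local(local) => local.hover(req),
            Store::Remote(remote) => remote.hover(req),
        }
    }
}

fn main() {
    let req = HoverRequest { buffer_id: 1, offset: 42 };
    println!("{}", Store::Local(LocalStore).hover(&req).contents);
    println!("{}", Store::Remote(RemoteStore { project_id: 7 }).hover(&req).contents);
}
```

In the real code the dispatch goes through `LspStoreMode::Local`/the remote mode and async GPUI entities rather than a plain enum, but the caller-facing idea is the same: one API, with no knowledge of which server, or which machine, ends up answering.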
+#[derive(Clone)] +struct UnifiedLanguageServer { + id: LanguageServerId, + project_roots: HashSet>, +} + +#[derive(Clone, Hash, PartialEq, Eq)] +struct LanguageServerSeed { + worktree_id: WorktreeId, + name: LanguageServerName, + toolchain: Option, + settings: Arc, +} + #[derive(Debug)] pub struct DocumentDiagnosticsUpdate<'a, D> { pub diagnostics: D, @@ -157,17 +183,18 @@ pub struct DocumentDiagnostics { pub struct LocalLspStore { weak: WeakEntity, worktree_store: Entity, - toolchain_store: Entity, + toolchain_store: Entity, http_client: Arc, environment: Entity, fs: Arc, languages: Arc, - language_server_ids: HashMap<(WorktreeId, LanguageServerName), BTreeSet>, + language_server_ids: HashMap, yarn: Entity, pub language_servers: HashMap, buffers_being_formatted: HashSet, last_workspace_edits_by_language_server: HashMap, language_server_watched_paths: HashMap, + watched_manifest_filenames: HashSet, language_server_paths_watched_for_rename: HashMap, language_server_watcher_registrations: @@ -188,7 +215,7 @@ pub struct LocalLspStore { >, buffer_snapshots: HashMap>>, // buffer_id -> server_id -> vec of snapshots _subscription: gpui::Subscription, - lsp_tree: Entity, + lsp_tree: LanguageServerTree, registered_buffers: HashMap, buffers_opened_in_servers: HashMap>, buffer_pull_diagnostics_result_ids: HashMap>>, @@ -208,30 +235,81 @@ impl LocalLspStore { } } + fn get_or_insert_language_server( + &mut self, + worktree_handle: &Entity, + delegate: Arc, + disposition: &Arc, + language_name: &LanguageName, + cx: &mut App, + ) -> LanguageServerId { + let key = LanguageServerSeed { + worktree_id: worktree_handle.read(cx).id(), + name: disposition.server_name.clone(), + settings: disposition.settings.clone(), + toolchain: disposition.toolchain.clone(), + }; + if let Some(state) = self.language_server_ids.get_mut(&key) { + state.project_roots.insert(disposition.path.path.clone()); + state.id + } else { + let adapter = self + .languages + .lsp_adapters(language_name) + .into_iter() + .find(|adapter| adapter.name() == disposition.server_name) + .expect("To find LSP adapter"); + let new_language_server_id = self.start_language_server( + worktree_handle, + delegate, + adapter, + disposition.settings.clone(), + key.clone(), + cx, + ); + if let Some(state) = self.language_server_ids.get_mut(&key) { + state.project_roots.insert(disposition.path.path.clone()); + } else { + debug_assert!( + false, + "Expected `start_language_server` to ensure that `key` exists in a map" + ); + } + new_language_server_id + } + } + fn start_language_server( &mut self, worktree_handle: &Entity, delegate: Arc, adapter: Arc, settings: Arc, + key: LanguageServerSeed, cx: &mut App, ) -> LanguageServerId { let worktree = worktree_handle.read(cx); - let worktree_id = worktree.id(); - let root_path = worktree.abs_path(); - let key = (worktree_id, adapter.name.clone()); + let root_path = worktree.abs_path(); + let toolchain = key.toolchain.clone(); let override_options = settings.initialization_options.clone(); let stderr_capture = Arc::new(Mutex::new(Some(String::new()))); let server_id = self.languages.next_language_server_id(); - log::info!( + log::trace!( "attempting to start language server {:?}, path: {root_path:?}, id: {server_id}", adapter.name.0 ); - let binary = self.get_language_server_binary(adapter.clone(), delegate.clone(), true, cx); + let binary = self.get_language_server_binary( + adapter.clone(), + settings, + toolchain.clone(), + delegate.clone(), + true, + cx, + ); let pending_workspace_folders: Arc>> = 
Default::default(); let pending_server = cx.spawn({ @@ -267,10 +345,7 @@ impl LocalLspStore { binary, &root_path, code_action_kinds, - Some(pending_workspace_folders).filter(|_| { - adapter.adapter.workspace_folders_content() - == WorkspaceFoldersContent::SubprojectRoots - }), + Some(pending_workspace_folders), cx, ) } @@ -290,15 +365,13 @@ impl LocalLspStore { .enabled; cx.spawn(async move |cx| { let result = async { - let toolchains = - lsp_store.update(cx, |lsp_store, cx| lsp_store.toolchain_store(cx))?; let language_server = pending_server.await?; let workspace_config = Self::workspace_configuration_for_adapter( adapter.adapter.clone(), fs.as_ref(), &delegate, - toolchains.clone(), + toolchain, cx, ) .await?; @@ -370,14 +443,14 @@ impl LocalLspStore { match result { Ok(server) => { lsp_store - .update(cx, |lsp_store, mut cx| { + .update(cx, |lsp_store, cx| { lsp_store.insert_newly_running_language_server( adapter, server.clone(), server_id, key, pending_workspace_folders, - &mut cx, + cx, ); }) .ok(); @@ -390,13 +463,17 @@ impl LocalLspStore { delegate.update_status( adapter.name(), BinaryStatus::Failed { - error: format!("{err}\n-- stderr--\n{log}"), + error: if log.is_empty() { + format!("{err:#}") + } else { + format!("{err:#}\n-- stderr --\n{log}") + }, }, ); - let message = - format!("Failed to start language server {server_name:?}: {err:#?}"); - log::error!("{message}"); - log::error!("server stderr: {log}"); + log::error!("Failed to start language server {server_name:?}: {err:?}"); + if !log.is_empty() { + log::error!("server stderr: {log}"); + } None } } @@ -413,31 +490,26 @@ impl LocalLspStore { self.language_servers.insert(server_id, state); self.language_server_ids .entry(key) - .or_default() - .insert(server_id); + .or_insert(UnifiedLanguageServer { + id: server_id, + project_roots: Default::default(), + }); server_id } fn get_language_server_binary( &self, adapter: Arc, + settings: Arc, + toolchain: Option, delegate: Arc, allow_binary_download: bool, cx: &mut App, ) -> Task> { - let settings = ProjectSettings::get( - Some(SettingsLocation { - worktree_id: delegate.worktree_id(), - path: Path::new(""), - }), - cx, - ) - .lsp - .get(&adapter.name) - .and_then(|s| s.binary.clone()); - - if settings.as_ref().is_some_and(|b| b.path.is_some()) { - let settings = settings.unwrap(); + if let Some(settings) = settings.binary.as_ref() + && settings.path.is_some() + { + let settings = settings.clone(); return cx.background_spawn(async move { let mut env = delegate.shell_env().await; @@ -457,16 +529,17 @@ impl LocalLspStore { } let lsp_binary_options = LanguageServerBinaryOptions { allow_path_lookup: !settings + .binary .as_ref() .and_then(|b| b.ignore_system_version) .unwrap_or_default(), allow_binary_download, }; - let toolchains = self.toolchain_store.read(cx).as_language_toolchain_store(); + cx.spawn(async move |cx| { let binary_result = adapter .clone() - .get_language_server_command(delegate.clone(), toolchains, lsp_binary_options, cx) + .get_language_server_command(delegate.clone(), toolchain, lsp_binary_options, cx) .await; delegate.update_status(adapter.name.clone(), BinaryStatus::None); @@ -476,12 +549,12 @@ impl LocalLspStore { shell_env.extend(binary.env.unwrap_or_default()); - if let Some(settings) = settings { - if let Some(arguments) = settings.arguments { - binary.arguments = arguments.into_iter().map(Into::into).collect(); + if let Some(settings) = settings.binary.as_ref() { + if let Some(arguments) = &settings.arguments { + binary.arguments = 
arguments.iter().map(Into::into).collect(); } - if let Some(env) = settings.env { - shell_env.extend(env); + if let Some(env) = &settings.env { + shell_env.extend(env.iter().map(|(k, v)| (k.clone(), v.clone()))); } } @@ -555,14 +628,20 @@ impl LocalLspStore { let fs = fs.clone(); let mut cx = cx.clone(); async move { - let toolchains = - this.update(&mut cx, |this, cx| this.toolchain_store(cx))?; - + let toolchain_for_id = this + .update(&mut cx, |this, _| { + this.as_local()?.language_server_ids.iter().find_map( + |(seed, value)| { + (value.id == server_id).then(|| seed.toolchain.clone()) + }, + ) + })? + .context("Expected the LSP store to be in a local mode")?; let workspace_config = Self::workspace_configuration_for_adapter( adapter.clone(), fs.as_ref(), &delegate, - toolchains.clone(), + toolchain_for_id, &mut cx, ) .await?; @@ -591,10 +670,10 @@ impl LocalLspStore { let this = this.clone(); move |_, cx| { let this = this.clone(); - let mut cx = cx.clone(); + let cx = cx.clone(); async move { - let Some(server) = this - .read_with(&mut cx, |this, _| this.language_server_for_id(server_id))? + let Some(server) = + this.read_with(&cx, |this, _| this.language_server_for_id(server_id))? else { return Ok(None); }; @@ -623,10 +702,9 @@ impl LocalLspStore { async move { this.update(&mut cx, |this, _| { if let Some(status) = this.language_server_statuses.get_mut(&server_id) + && let lsp::NumberOrString::String(token) = params.token { - if let lsp::NumberOrString::String(token) = params.token { - status.progress_tokens.insert(token); - } + status.progress_tokens.insert(token); } })?; @@ -638,139 +716,27 @@ impl LocalLspStore { language_server .on_request::({ - let this = this.clone(); + let lsp_store = this.clone(); move |params, cx| { - let lsp_store = this.clone(); + let lsp_store = lsp_store.clone(); let mut cx = cx.clone(); async move { - for reg in params.registrations { - match reg.method.as_str() { - "workspace/didChangeWatchedFiles" => { - if let Some(options) = reg.register_options { - let options = serde_json::from_value(options)?; - lsp_store.update(&mut cx, |this, cx| { - this.as_local_mut()?.on_lsp_did_change_watched_files( - server_id, ®.id, options, cx, + lsp_store + .update(&mut cx, |lsp_store, cx| { + if lsp_store.as_local().is_some() { + match lsp_store + .register_server_capabilities(server_id, params, cx) + { + Ok(()) => {} + Err(e) => { + log::error!( + "Failed to register server capabilities: {e:#}" ); - Some(()) - })?; - } - } - "textDocument/rangeFormatting" => { - lsp_store.update(&mut cx, |lsp_store, cx| { - if let Some(server) = - lsp_store.language_server_for_id(server_id) - { - let options = reg - .register_options - .map(|options| { - serde_json::from_value::< - lsp::DocumentRangeFormattingOptions, - >( - options - ) - }) - .transpose()?; - let provider = match options { - None => OneOf::Left(true), - Some(options) => OneOf::Right(options), - }; - server.update_capabilities(|capabilities| { - capabilities.document_range_formatting_provider = - Some(provider); - }); - notify_server_capabilities_updated(&server, cx); - } - anyhow::Ok(()) - })??; - } - "textDocument/onTypeFormatting" => { - lsp_store.update(&mut cx, |lsp_store, cx| { - if let Some(server) = - lsp_store.language_server_for_id(server_id) - { - let options = reg - .register_options - .map(|options| { - serde_json::from_value::< - lsp::DocumentOnTypeFormattingOptions, - >( - options - ) - }) - .transpose()?; - if let Some(options) = options { - server.update_capabilities(|capabilities| { - capabilities - 
.document_on_type_formatting_provider = - Some(options); - }); - notify_server_capabilities_updated(&server, cx); - } - } - anyhow::Ok(()) - })??; - } - "textDocument/formatting" => { - lsp_store.update(&mut cx, |lsp_store, cx| { - if let Some(server) = - lsp_store.language_server_for_id(server_id) - { - let options = reg - .register_options - .map(|options| { - serde_json::from_value::< - lsp::DocumentFormattingOptions, - >( - options - ) - }) - .transpose()?; - let provider = match options { - None => OneOf::Left(true), - Some(options) => OneOf::Right(options), - }; - server.update_capabilities(|capabilities| { - capabilities.document_formatting_provider = - Some(provider); - }); - notify_server_capabilities_updated(&server, cx); - } - anyhow::Ok(()) - })??; - } - "workspace/didChangeConfiguration" => { - // Ignore payload since we notify clients of setting changes unconditionally, relying on them pulling the latest settings. - } - "textDocument/rename" => { - lsp_store.update(&mut cx, |lsp_store, cx| { - if let Some(server) = - lsp_store.language_server_for_id(server_id) - { - let options = reg - .register_options - .map(|options| { - serde_json::from_value::( - options, - ) - }) - .transpose()?; - let options = match options { - None => OneOf::Left(true), - Some(options) => OneOf::Right(options), - }; - - server.update_capabilities(|capabilities| { - capabilities.rename_provider = Some(options); - }); - notify_server_capabilities_updated(&server, cx); } - anyhow::Ok(()) - })??; + }; } - _ => log::warn!("unhandled capability registration: {reg:?}"), - } - } + }) + .ok(); Ok(()) } } @@ -779,79 +745,27 @@ impl LocalLspStore { language_server .on_request::({ - let this = this.clone(); + let lsp_store = this.clone(); move |params, cx| { - let lsp_store = this.clone(); + let lsp_store = lsp_store.clone(); let mut cx = cx.clone(); async move { - for unreg in params.unregisterations.iter() { - match unreg.method.as_str() { - "workspace/didChangeWatchedFiles" => { - lsp_store.update(&mut cx, |lsp_store, cx| { - lsp_store - .as_local_mut()? - .on_lsp_unregister_did_change_watched_files( - server_id, &unreg.id, cx, + lsp_store + .update(&mut cx, |lsp_store, cx| { + if lsp_store.as_local().is_some() { + match lsp_store + .unregister_server_capabilities(server_id, params, cx) + { + Ok(()) => {} + Err(e) => { + log::error!( + "Failed to unregister server capabilities: {e:#}" ); - Some(()) - })?; - } - "workspace/didChangeConfiguration" => { - // Ignore payload since we notify clients of setting changes unconditionally, relying on them pulling the latest settings. 
- } - "textDocument/rename" => { - lsp_store.update(&mut cx, |lsp_store, cx| { - if let Some(server) = - lsp_store.language_server_for_id(server_id) - { - server.update_capabilities(|capabilities| { - capabilities.rename_provider = None - }); - notify_server_capabilities_updated(&server, cx); - } - })?; - } - "textDocument/rangeFormatting" => { - lsp_store.update(&mut cx, |lsp_store, cx| { - if let Some(server) = - lsp_store.language_server_for_id(server_id) - { - server.update_capabilities(|capabilities| { - capabilities.document_range_formatting_provider = - None - }); - notify_server_capabilities_updated(&server, cx); - } - })?; - } - "textDocument/onTypeFormatting" => { - lsp_store.update(&mut cx, |lsp_store, cx| { - if let Some(server) = - lsp_store.language_server_for_id(server_id) - { - server.update_capabilities(|capabilities| { - capabilities.document_on_type_formatting_provider = - None; - }); - notify_server_capabilities_updated(&server, cx); - } - })?; - } - "textDocument/formatting" => { - lsp_store.update(&mut cx, |lsp_store, cx| { - if let Some(server) = - lsp_store.language_server_for_id(server_id) - { - server.update_capabilities(|capabilities| { - capabilities.document_formatting_provider = None; - }); - notify_server_capabilities_updated(&server, cx); } - })?; + } } - _ => log::warn!("unhandled capability unregistration: {unreg:?}"), - } - } + }) + .ok(); Ok(()) } } @@ -860,18 +774,15 @@ impl LocalLspStore { language_server .on_request::({ - let adapter = adapter.clone(); let this = this.clone(); move |params, cx| { let mut cx = cx.clone(); let this = this.clone(); - let adapter = adapter.clone(); async move { LocalLspStore::on_lsp_workspace_edit( this.clone(), params, server_id, - adapter.clone(), &mut cx, ) .await @@ -1007,7 +918,7 @@ impl LocalLspStore { message: params.message, actions: vec![], response_channel: tx, - lsp_name: name.clone(), + lsp_name: name, }; let _ = this.update(&mut cx, |_, cx| { @@ -1104,10 +1015,10 @@ impl LocalLspStore { } } LanguageServerState::Starting { startup, .. } => { - if let Some(server) = startup.await { - if let Some(shutdown) = server.shutdown() { - shutdown.await; - } + if let Some(server) = startup.await + && let Some(shutdown) = server.shutdown() + { + shutdown.await; } } } @@ -1120,19 +1031,18 @@ impl LocalLspStore { ) -> impl Iterator> { self.language_server_ids .iter() - .flat_map(move |((language_server_path, _), ids)| { - ids.iter().filter_map(move |id| { - if *language_server_path != worktree_id { - return None; - } - if let Some(LanguageServerState::Running { server, .. }) = - self.language_servers.get(id) - { - return Some(server); - } else { - None - } - }) + .filter_map(move |(seed, state)| { + if seed.worktree_id != worktree_id { + return None; + } + + if let Some(LanguageServerState::Running { server, .. 
}) = + self.language_servers.get(&state.id) + { + Some(server) + } else { + None + } }) } @@ -1149,19 +1059,18 @@ impl LocalLspStore { else { return Vec::new(); }; - let delegate = Arc::new(ManifestQueryDelegate::new(worktree.read(cx).snapshot())); - let root = self.lsp_tree.update(cx, |this, cx| { - this.get( + let delegate: Arc = + Arc::new(ManifestQueryDelegate::new(worktree.read(cx).snapshot())); + + self.lsp_tree + .get( project_path, - AdapterQuery::Language(&language.name()), - delegate, + language.name(), + language.manifest(), + &delegate, cx, ) - .filter_map(|node| node.server_id()) .collect::>() - }); - - root } fn language_server_ids_for_buffer( @@ -1243,7 +1152,7 @@ impl LocalLspStore { .collect::>() }) })?; - for (lsp_adapter, language_server) in adapters_and_servers.iter() { + for (_, language_server) in adapters_and_servers.iter() { let actions = Self::get_server_code_actions_from_action_kinds( &lsp_store, language_server.server_id(), @@ -1255,7 +1164,6 @@ impl LocalLspStore { Self::execute_code_actions_on_server( &lsp_store, language_server, - lsp_adapter, actions, push_to_history, &mut project_transaction, @@ -1970,7 +1878,7 @@ impl LocalLspStore { ) -> Result, Arc)>> { let capabilities = &language_server.capabilities(); let range_formatting_provider = capabilities.document_range_formatting_provider.as_ref(); - if range_formatting_provider.map_or(false, |provider| provider == &OneOf::Left(false)) { + if range_formatting_provider == Some(&OneOf::Left(false)) { anyhow::bail!( "{} language server does not support range formatting", language_server.name() @@ -2017,7 +1925,7 @@ impl LocalLspStore { if let Some(lsp_edits) = lsp_edits { this.update(cx, |this, cx| { this.as_local_mut().unwrap().edits_from_lsp( - &buffer_handle, + buffer_handle, lsp_edits, language_server.server_id(), None, @@ -2198,13 +2106,14 @@ impl LocalLspStore { let buffer = buffer_handle.read(cx); let file = buffer.file().cloned(); + let Some(file) = File::from_dyn(file.as_ref()) else { return; }; if !file.is_local() { return; } - + let path = ProjectPath::from_file(file, cx); let worktree_id = file.worktree_id(cx); let language = buffer.language().cloned(); @@ -2227,46 +2136,52 @@ impl LocalLspStore { let Some(language) = language else { return; }; - for adapter in self.languages.lsp_adapters(&language.name()) { - let servers = self - .language_server_ids - .get(&(worktree_id, adapter.name.clone())); - if let Some(server_ids) = servers { - for server_id in server_ids { - let server = self - .language_servers - .get(server_id) - .and_then(|server_state| { - if let LanguageServerState::Running { server, .. } = server_state { - Some(server.clone()) - } else { - None - } - }); - let server = match server { - Some(server) => server, - None => continue, - }; + let Some(snapshot) = self + .worktree_store + .read(cx) + .worktree_for_id(worktree_id, cx) + .map(|worktree| worktree.read(cx).snapshot()) + else { + return; + }; + let delegate: Arc = Arc::new(ManifestQueryDelegate::new(snapshot)); - buffer_handle.update(cx, |buffer, cx| { - buffer.set_completion_triggers( - server.server_id(), - server - .capabilities() - .completion_provider + for server_id in + self.lsp_tree + .get(path, language.name(), language.manifest(), &delegate, cx) + { + let server = self + .language_servers + .get(&server_id) + .and_then(|server_state| { + if let LanguageServerState::Running { server, .. 
} = server_state { + Some(server.clone()) + } else { + None + } + }); + let server = match server { + Some(server) => server, + None => continue, + }; + + buffer_handle.update(cx, |buffer, cx| { + buffer.set_completion_triggers( + server.server_id(), + server + .capabilities() + .completion_provider + .as_ref() + .and_then(|provider| { + provider + .trigger_characters .as_ref() - .and_then(|provider| { - provider - .trigger_characters - .as_ref() - .map(|characters| characters.iter().cloned().collect()) - }) - .unwrap_or_default(), - cx, - ); - }); - } - } + .map(|characters| characters.iter().cloned().collect()) + }) + .unwrap_or_default(), + cx, + ); + }); } } @@ -2376,6 +2291,31 @@ impl LocalLspStore { Ok(()) } + fn register_language_server_for_invisible_worktree( + &mut self, + worktree: &Entity, + language_server_id: LanguageServerId, + cx: &mut App, + ) { + let worktree = worktree.read(cx); + let worktree_id = worktree.id(); + debug_assert!(!worktree.is_visible()); + let Some(mut origin_seed) = self + .language_server_ids + .iter() + .find_map(|(seed, state)| (state.id == language_server_id).then(|| seed.clone())) + else { + return; + }; + origin_seed.worktree_id = worktree_id; + self.language_server_ids + .entry(origin_seed) + .or_insert_with(|| UnifiedLanguageServer { + id: language_server_id, + project_roots: Default::default(), + }); + } + fn register_buffer_with_language_servers( &mut self, buffer_handle: &Entity, @@ -2416,27 +2356,23 @@ impl LocalLspStore { }; let language_name = language.name(); let (reused, delegate, servers) = self - .lsp_tree - .update(cx, |lsp_tree, cx| { - self.reuse_existing_language_server(lsp_tree, &worktree, &language_name, cx) - }) - .map(|(delegate, servers)| (true, delegate, servers)) + .reuse_existing_language_server(&self.lsp_tree, &worktree, &language_name, cx) + .map(|(delegate, apply)| (true, delegate, apply(&mut self.lsp_tree))) .unwrap_or_else(|| { let lsp_delegate = LocalLspAdapterDelegate::from_local_lsp(self, &worktree, cx); - let delegate = Arc::new(ManifestQueryDelegate::new(worktree.read(cx).snapshot())); + let delegate: Arc = + Arc::new(ManifestQueryDelegate::new(worktree.read(cx).snapshot())); + let servers = self .lsp_tree - .clone() - .update(cx, |language_server_tree, cx| { - language_server_tree - .get( - ProjectPath { worktree_id, path }, - AdapterQuery::Language(&language.name()), - delegate.clone(), - cx, - ) - .collect::>() - }); + .walk( + ProjectPath { worktree_id, path }, + language.name(), + language.manifest(), + &delegate, + cx, + ) + .collect::>(); (false, lsp_delegate, servers) }); let servers_and_adapters = servers @@ -2446,67 +2382,46 @@ impl LocalLspStore { return None; } if !only_register_servers.is_empty() { - if let Some(server_id) = server_node.server_id() { - if !only_register_servers.contains(&LanguageServerSelector::Id(server_id)) { - return None; - } + if let Some(server_id) = server_node.server_id() + && !only_register_servers.contains(&LanguageServerSelector::Id(server_id)) + { + return None; } - if let Some(name) = server_node.name() { - if !only_register_servers.contains(&LanguageServerSelector::Name(name)) { - return None; - } + if let Some(name) = server_node.name() + && !only_register_servers.contains(&LanguageServerSelector::Name(name)) + { + return None; } } - let server_id = server_node.server_id_or_init( - |LaunchDisposition { - server_name, - path, - settings, - }| { - let server_id = - { - let uri = Url::from_file_path( - worktree.read(cx).abs_path().join(&path.path), - ); - let key = 
(worktree_id, server_name.clone()); - if !self.language_server_ids.contains_key(&key) { - let language_name = language.name(); - let adapter = self.languages - .lsp_adapters(&language_name) - .into_iter() - .find(|adapter| &adapter.name() == server_name) - .expect("To find LSP adapter"); - self.start_language_server( - &worktree, - delegate.clone(), - adapter, - settings, - cx, - ); - } - if let Some(server_ids) = self - .language_server_ids - .get(&key) - { - debug_assert_eq!(server_ids.len(), 1); - let server_id = server_ids.iter().cloned().next().unwrap(); - if let Some(state) = self.language_servers.get(&server_id) { - if let Ok(uri) = uri { - state.add_workspace_folder(uri); - }; - } - server_id - } else { - unreachable!("Language server ID should be available, as it's registered on demand") - } + let server_id = server_node.server_id_or_init(|disposition| { + let path = &disposition.path; + + { + let uri = + Url::from_file_path(worktree.read(cx).abs_path().join(&path.path)); + + let server_id = self.get_or_insert_language_server( + &worktree, + delegate.clone(), + disposition, + &language_name, + cx, + ); + if let Some(state) = self.language_servers.get(&server_id) + && let Ok(uri) = uri + { + state.add_workspace_folder(uri); }; server_id - }, - )?; + } + })?; let server_state = self.language_servers.get(&server_id)?; - if let LanguageServerState::Running { server, adapter, .. } = server_state { + if let LanguageServerState::Running { + server, adapter, .. + } = server_state + { Some((server.clone(), adapter.clone())) } else { None @@ -2573,13 +2488,16 @@ impl LocalLspStore { } } - fn reuse_existing_language_server( + fn reuse_existing_language_server<'lang_name>( &self, - server_tree: &mut LanguageServerTree, + server_tree: &LanguageServerTree, worktree: &Entity, - language_name: &LanguageName, + language_name: &'lang_name LanguageName, cx: &mut App, - ) -> Option<(Arc, Vec)> { + ) -> Option<( + Arc, + impl FnOnce(&mut LanguageServerTree) -> Vec + use<'lang_name>, + )> { if worktree.read(cx).is_visible() { return None; } @@ -2618,16 +2536,16 @@ impl LocalLspStore { .into_values() .max_by_key(|servers| servers.len())?; - for server_node in &servers { - server_tree.register_reused( - worktree.read(cx).id(), - language_name.clone(), - server_node.clone(), - ); - } + let worktree_id = worktree.read(cx).id(); + let apply = move |tree: &mut LanguageServerTree| { + for server_node in &servers { + tree.register_reused(worktree_id, language_name.clone(), server_node.clone()); + } + servers + }; let delegate = LocalLspAdapterDelegate::from_local_lsp(self, worktree, cx); - Some((delegate, servers)) + Some((delegate, apply)) } pub(crate) fn unregister_old_buffer_from_language_servers( @@ -2722,13 +2640,13 @@ impl LocalLspStore { this.request_lsp(buffer.clone(), server, request, cx) })? 
.await?; - return Ok(actions); + Ok(actions) } pub async fn execute_code_actions_on_server( lsp_store: &WeakEntity, language_server: &Arc, - lsp_adapter: &Arc, + actions: Vec, push_to_history: bool, project_transaction: &mut ProjectTransaction, @@ -2748,7 +2666,6 @@ impl LocalLspStore { lsp_store.upgrade().context("project dropped")?, edit.clone(), push_to_history, - lsp_adapter.clone(), language_server.clone(), cx, ) @@ -2799,7 +2716,7 @@ impl LocalLspStore { } } } - return Ok(()); + Ok(()) } pub async fn deserialize_text_edits( @@ -2929,7 +2846,6 @@ impl LocalLspStore { this: Entity, edit: lsp::WorkspaceEdit, push_to_history: bool, - lsp_adapter: Arc, language_server: Arc, cx: &mut AsyncApp, ) -> Result { @@ -3030,7 +2946,6 @@ impl LocalLspStore { this.open_local_buffer_via_lsp( op.text_document.uri.clone(), language_server.server_id(), - lsp_adapter.name.clone(), cx, ) })? @@ -3040,11 +2955,11 @@ impl LocalLspStore { .update(cx, |this, cx| { let path = buffer_to_edit.read(cx).project_path(cx); let active_entry = this.active_entry; - let is_active_entry = path.clone().map_or(false, |project_path| { + let is_active_entry = path.is_some_and(|project_path| { this.worktree_store .read(cx) .entry_for_path(&project_path, cx) - .map_or(false, |entry| Some(entry.id) == active_entry) + .is_some_and(|entry| Some(entry.id) == active_entry) }); let local = this.as_local_mut().unwrap(); @@ -3130,16 +3045,14 @@ impl LocalLspStore { buffer.edit([(range, text)], None, cx); } - let transaction = buffer.end_transaction(cx).and_then(|transaction_id| { + buffer.end_transaction(cx).and_then(|transaction_id| { if push_to_history { buffer.finalize_last_transaction(); buffer.get_transaction(transaction_id).cloned() } else { buffer.forget_transaction(transaction_id) } - }); - - transaction + }) })?; if let Some(transaction) = transaction { project_transaction.0.insert(buffer_to_edit, transaction); @@ -3155,7 +3068,6 @@ impl LocalLspStore { this: WeakEntity, params: lsp::ApplyWorkspaceEditParams, server_id: LanguageServerId, - adapter: Arc, cx: &mut AsyncApp, ) -> Result { let this = this.upgrade().context("project project closed")?; @@ -3166,7 +3078,6 @@ impl LocalLspStore { this.clone(), params.edit, true, - adapter.clone(), language_server.clone(), cx, ) @@ -3197,23 +3108,19 @@ impl LocalLspStore { prettier_store.remove_worktree(id_to_remove, cx); }); - let mut servers_to_remove = BTreeMap::default(); + let mut servers_to_remove = BTreeSet::default(); let mut servers_to_preserve = HashSet::default(); - for ((path, server_name), ref server_ids) in &self.language_server_ids { - if *path == id_to_remove { - servers_to_remove.extend(server_ids.iter().map(|id| (*id, server_name.clone()))); + for (seed, state) in &self.language_server_ids { + if seed.worktree_id == id_to_remove { + servers_to_remove.insert(state.id); } else { - servers_to_preserve.extend(server_ids.iter().cloned()); + servers_to_preserve.insert(state.id); } } - servers_to_remove.retain(|server_id, _| !servers_to_preserve.contains(server_id)); - - for (server_id_to_remove, _) in &servers_to_remove { - self.language_server_ids - .values_mut() - .for_each(|server_ids| { - server_ids.remove(server_id_to_remove); - }); + servers_to_remove.retain(|server_id| !servers_to_preserve.contains(server_id)); + self.language_server_ids + .retain(|_, state| !servers_to_remove.contains(&state.id)); + for server_id_to_remove in &servers_to_remove { self.language_server_watched_paths .remove(server_id_to_remove); self.language_server_paths_watched_for_rename @@ 
-3228,7 +3135,7 @@ impl LocalLspStore { } cx.emit(LspStoreEvent::LanguageServerRemoved(*server_id_to_remove)); } - servers_to_remove.into_keys().collect() + servers_to_remove.into_iter().collect() } fn rebuild_watched_paths_inner<'a>( @@ -3257,7 +3164,7 @@ impl LocalLspStore { for watcher in watchers { if let Some((worktree, literal_prefix, pattern)) = - self.worktree_and_path_for_file_watcher(&worktrees, &watcher, cx) + self.worktree_and_path_for_file_watcher(&worktrees, watcher, cx) { worktree.update(cx, |worktree, _| { if let Some((tree, glob)) = @@ -3486,16 +3393,20 @@ impl LocalLspStore { Ok(Some(initialization_config)) } + fn toolchain_store(&self) -> &Entity { + &self.toolchain_store + } + async fn workspace_configuration_for_adapter( adapter: Arc, fs: &dyn Fs, delegate: &Arc, - toolchains: Arc, + toolchain: Option, cx: &mut AsyncApp, ) -> Result { let mut workspace_config = adapter .clone() - .workspace_configuration(fs, delegate, toolchains.clone(), cx) + .workspace_configuration(fs, delegate, toolchain, cx) .await?; for other_adapter in delegate.registered_lsp_adapters() { @@ -3504,13 +3415,7 @@ impl LocalLspStore { } if let Ok(Some(target_config)) = other_adapter .clone() - .additional_workspace_configuration( - adapter.name(), - fs, - delegate, - toolchains.clone(), - cx, - ) + .additional_workspace_configuration(adapter.name(), fs, delegate, cx) .await { merge_json_value_into(target_config.clone(), &mut workspace_config); @@ -3519,6 +3424,16 @@ impl LocalLspStore { Ok(workspace_config) } + + fn language_server_for_id(&self, id: LanguageServerId) -> Option> { + if let Some(LanguageServerState::Running { server, .. }) = self.language_servers.get(&id) { + Some(server.clone()) + } else if let Some((_, server)) = self.supplementary_language_servers.get(&id) { + Some(Arc::clone(server)) + } else { + None + } + } } fn notify_server_capabilities_updated(server: &LanguageServer, cx: &mut Context) { @@ -3566,7 +3481,6 @@ pub struct LspStore { nonce: u128, buffer_store: Entity, worktree_store: Entity, - toolchain_store: Option>, pub languages: Arc, language_server_statuses: BTreeMap, active_entry: Option, @@ -3577,6 +3491,7 @@ pub struct LspStore { pub(super) lsp_server_capabilities: HashMap, lsp_document_colors: HashMap, lsp_code_lens: HashMap, + running_lsp_requests: HashMap>)>, } #[derive(Debug, Default, Clone)] @@ -3586,7 +3501,7 @@ pub struct DocumentColors { } type DocumentColorTask = Shared>>>; -type CodeLensTask = Shared, Arc>>>; +type CodeLensTask = Shared>, Arc>>>; #[derive(Debug, Default)] struct DocumentColorData { @@ -3666,6 +3581,8 @@ struct CoreSymbol { impl LspStore { pub fn init(client: &AnyProtoClient) { + client.add_entity_request_handler(Self::handle_lsp_query); + client.add_entity_message_handler(Self::handle_lsp_query_response); client.add_entity_request_handler(Self::handle_multi_lsp_query); client.add_entity_request_handler(Self::handle_restart_language_servers); client.add_entity_request_handler(Self::handle_stop_language_servers); @@ -3757,7 +3674,7 @@ impl LspStore { buffer_store: Entity, worktree_store: Entity, prettier_store: Entity, - toolchain_store: Entity, + toolchain_store: Entity, environment: Entity, manifest_tree: Entity, languages: Arc, @@ -3799,7 +3716,7 @@ impl LspStore { mode: LspStoreMode::Local(LocalLspStore { weak: cx.weak_entity(), worktree_store: worktree_store.clone(), - toolchain_store: toolchain_store.clone(), + supplementary_language_servers: Default::default(), languages: languages.clone(), language_server_ids: Default::default(), @@ 
-3822,16 +3739,22 @@ impl LspStore { .unwrap() .shutdown_language_servers_on_quit(cx) }), - lsp_tree: LanguageServerTree::new(manifest_tree, languages.clone(), cx), + lsp_tree: LanguageServerTree::new( + manifest_tree, + languages.clone(), + toolchain_store.clone(), + ), + toolchain_store, registered_buffers: HashMap::default(), buffers_opened_in_servers: HashMap::default(), buffer_pull_diagnostics_result_ids: HashMap::default(), + watched_manifest_filenames: ManifestProvidersStore::global(cx) + .manifest_file_names(), }), last_formatting_failure: None, downstream_client: None, buffer_store, worktree_store, - toolchain_store: Some(toolchain_store), languages: languages.clone(), language_server_statuses: Default::default(), nonce: StdRng::from_entropy().r#gen(), @@ -3839,6 +3762,7 @@ impl LspStore { lsp_server_capabilities: HashMap::default(), lsp_document_colors: HashMap::default(), lsp_code_lens: HashMap::default(), + running_lsp_requests: HashMap::default(), active_entry: None, _maintain_workspace_config, _maintain_buffer_languages: Self::maintain_buffer_languages(languages, cx), @@ -3869,7 +3793,6 @@ impl LspStore { pub(super) fn new_remote( buffer_store: Entity, worktree_store: Entity, - toolchain_store: Option>, languages: Arc, upstream_client: AnyProtoClient, project_id: u64, @@ -3901,8 +3824,9 @@ impl LspStore { lsp_server_capabilities: HashMap::default(), lsp_document_colors: HashMap::default(), lsp_code_lens: HashMap::default(), + running_lsp_requests: HashMap::default(), active_entry: None, - toolchain_store, + _maintain_workspace_config, _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx), } @@ -3920,13 +3844,13 @@ impl LspStore { } BufferStoreEvent::BufferChangedFilePath { buffer, old_file } => { let buffer_id = buffer.read(cx).remote_id(); - if let Some(local) = self.as_local_mut() { - if let Some(old_file) = File::from_dyn(old_file.as_ref()) { - local.reset_buffer(buffer, old_file, cx); + if let Some(local) = self.as_local_mut() + && let Some(old_file) = File::from_dyn(old_file.as_ref()) + { + local.reset_buffer(buffer, old_file, cx); - if local.registered_buffers.contains_key(&buffer_id) { - local.unregister_old_buffer_from_language_servers(buffer, old_file, cx); - } + if local.registered_buffers.contains_key(&buffer_id) { + local.unregister_old_buffer_from_language_servers(buffer, old_file, cx); } } @@ -4001,14 +3925,12 @@ impl LspStore { fn on_toolchain_store_event( &mut self, - _: Entity, + _: Entity, event: &ToolchainStoreEvent, _: &mut Context, ) { match event { - ToolchainStoreEvent::ToolchainActivated { .. 
} => { - self.request_workspace_config_refresh() - } + ToolchainStoreEvent::ToolchainActivated => self.request_workspace_config_refresh(), } } @@ -4080,9 +4002,9 @@ impl LspStore { let local = this.as_local()?; let mut servers = Vec::new(); - for ((worktree_id, _), server_ids) in &local.language_server_ids { - for server_id in server_ids { - let Some(states) = local.language_servers.get(server_id) else { + for (seed, state) in &local.language_server_ids { + + let Some(states) = local.language_servers.get(&state.id) else { continue; }; let (json_adapter, json_server) = match states { @@ -4097,7 +4019,7 @@ impl LspStore { let Some(worktree) = this .worktree_store .read(cx) - .worktree_for_id(*worktree_id, cx) + .worktree_for_id(seed.worktree_id, cx) else { continue; }; @@ -4113,9 +4035,9 @@ impl LspStore { ); servers.push((json_adapter, json_server, json_delegate)); - } + } - return Some(servers); + Some(servers) }) .ok() .flatten(); @@ -4124,10 +4046,10 @@ impl LspStore { return; }; - let Ok(Some((fs, toolchain_store))) = this.read_with(cx, |this, cx| { + let Ok(Some((fs, _))) = this.read_with(cx, |this, _| { let local = this.as_local()?; - let toolchain_store = this.toolchain_store(cx); - return Some((local.fs.clone(), toolchain_store)); + let toolchain_store = local.toolchain_store().clone(); + Some((local.fs.clone(), toolchain_store)) }) else { return; }; @@ -4138,7 +4060,7 @@ impl LspStore { adapter, fs.as_ref(), &delegate, - toolchain_store.clone(), + None, cx, ) .await @@ -4207,7 +4129,7 @@ impl LspStore { local.registered_buffers.remove(&buffer_id); local.buffers_opened_in_servers.remove(&buffer_id); if let Some(file) = File::from_dyn(buffer.read(cx).file()).cloned() { - local.unregister_old_buffer_from_language_servers(&buffer, &file, cx); + local.unregister_old_buffer_from_language_servers(buffer, &file, cx); } } }) @@ -4277,14 +4199,12 @@ impl LspStore { if local .registered_buffers .contains_key(&buffer.read(cx).remote_id()) - { - if let Some(file_url) = + && let Some(file_url) = file_path_to_lsp_url(&f.abs_path(cx)).log_err() - { - local.unregister_buffer_from_language_servers( - &buffer, &file_url, cx, - ); - } + { + local.unregister_buffer_from_language_servers( + &buffer, &file_url, cx, + ); } } } @@ -4382,25 +4302,19 @@ impl LspStore { let buffer = buffer_entity.read(cx); let buffer_file = buffer.file().cloned(); let buffer_id = buffer.remote_id(); - if let Some(local_store) = self.as_local_mut() { - if local_store.registered_buffers.contains_key(&buffer_id) { - if let Some(abs_path) = - File::from_dyn(buffer_file.as_ref()).map(|file| file.abs_path(cx)) - { - if let Some(file_url) = file_path_to_lsp_url(&abs_path).log_err() { - local_store.unregister_buffer_from_language_servers( - buffer_entity, - &file_url, - cx, - ); - } - } - } + if let Some(local_store) = self.as_local_mut() + && local_store.registered_buffers.contains_key(&buffer_id) + && let Some(abs_path) = + File::from_dyn(buffer_file.as_ref()).map(|file| file.abs_path(cx)) + && let Some(file_url) = file_path_to_lsp_url(&abs_path).log_err() + { + local_store.unregister_buffer_from_language_servers(buffer_entity, &file_url, cx); } buffer_entity.update(cx, |buffer, cx| { - if buffer.language().map_or(true, |old_language| { - !Arc::ptr_eq(old_language, &new_language) - }) { + if buffer + .language() + .is_none_or(|old_language| !Arc::ptr_eq(old_language, &new_language)) + { buffer.set_language(Some(new_language.clone()), cx); } }); @@ -4412,33 +4326,28 @@ impl LspStore { let worktree_id = if let Some(file) = 
buffer_file { let worktree = file.worktree.clone(); - if let Some(local) = self.as_local_mut() { - if local.registered_buffers.contains_key(&buffer_id) { - local.register_buffer_with_language_servers( - buffer_entity, - HashSet::default(), - cx, - ); - } + if let Some(local) = self.as_local_mut() + && local.registered_buffers.contains_key(&buffer_id) + { + local.register_buffer_with_language_servers(buffer_entity, HashSet::default(), cx); } Some(worktree.read(cx).id()) } else { None }; - if settings.prettier.allowed { - if let Some(prettier_plugins) = prettier_store::prettier_plugins_for_language(&settings) - { - let prettier_store = self.as_local().map(|s| s.prettier_store.clone()); - if let Some(prettier_store) = prettier_store { - prettier_store.update(cx, |prettier_store, cx| { - prettier_store.install_default_prettier( - worktree_id, - prettier_plugins.iter().map(|s| Arc::from(s.as_str())), - cx, - ) - }) - } + if settings.prettier.allowed + && let Some(prettier_plugins) = prettier_store::prettier_plugins_for_language(&settings) + { + let prettier_store = self.as_local().map(|s| s.prettier_store.clone()); + if let Some(prettier_store) = prettier_store { + prettier_store.update(cx, |prettier_store, cx| { + prettier_store.install_default_prettier( + worktree_id, + prettier_plugins.iter().map(|s| Arc::from(s.as_str())), + cx, + ) + }) } } @@ -4457,32 +4366,27 @@ impl LspStore { } pub(crate) fn send_diagnostic_summaries(&self, worktree: &mut Worktree) { - if let Some((client, downstream_project_id)) = self.downstream_client.clone() { - if let Some(diangostic_summaries) = self.diagnostic_summaries.get(&worktree.id()) { - let mut summaries = - diangostic_summaries - .into_iter() - .flat_map(|(path, summaries)| { - summaries - .into_iter() - .map(|(server_id, summary)| summary.to_proto(*server_id, path)) - }); - if let Some(summary) = summaries.next() { - client - .send(proto::UpdateDiagnosticSummary { - project_id: downstream_project_id, - worktree_id: worktree.id().to_proto(), - summary: Some(summary), - more_summaries: summaries.collect(), - }) - .log_err(); - } + if let Some((client, downstream_project_id)) = self.downstream_client.clone() + && let Some(diangostic_summaries) = self.diagnostic_summaries.get(&worktree.id()) + { + let mut summaries = diangostic_summaries.iter().flat_map(|(path, summaries)| { + summaries + .iter() + .map(|(server_id, summary)| summary.to_proto(*server_id, path)) + }); + if let Some(summary) = summaries.next() { + client + .send(proto::UpdateDiagnosticSummary { + project_id: downstream_project_id, + worktree_id: worktree.id().to_proto(), + summary: Some(summary), + more_summaries: summaries.collect(), + }) + .log_err(); } } } - // TODO: remove MultiLspQuery: instead, the proto handler should pick appropriate server(s) - // Then, use `send_lsp_proto_request` or analogue for most of the LSP proto requests and inline this check inside fn is_capable_for_proto_request( &self, buffer: &Entity, @@ -4529,7 +4433,7 @@ impl LspStore { .contains(&server_status.name) .then_some(server_id) }) - .filter_map(|server_id| self.lsp_server_capabilities.get(&server_id)) + .filter_map(|server_id| self.lsp_server_capabilities.get(server_id)) .any(check) } @@ -4606,7 +4510,7 @@ impl LspStore { if !request.check_capabilities(language_server.adapter_server_capabilities()) { return Task::ready(Ok(Default::default())); } - return cx.spawn(async move |this, cx| { + cx.spawn(async move |this, cx| { let lsp_request = language_server.request::(lsp_params); let id = lsp_request.id(); @@ 
-4655,7 +4559,7 @@ impl LspStore { anyhow::anyhow!(message) })?; - let response = request + request .response_from_lsp( response, this.upgrade().context("no app context")?, @@ -4663,9 +4567,8 @@ impl LspStore { language_server.server_id(), cx.clone(), ) - .await; - response - }); + .await + }) } fn on_settings_changed(&mut self, cx: &mut Context) { @@ -4683,7 +4586,7 @@ impl LspStore { } } - self.refresh_server_tree(cx); + self.request_workspace_config_refresh(); if let Some(prettier_store) = self.as_local().map(|s| s.prettier_store.clone()) { prettier_store.update(cx, |prettier_store, cx| { @@ -4696,158 +4599,150 @@ impl LspStore { fn refresh_server_tree(&mut self, cx: &mut Context) { let buffer_store = self.buffer_store.clone(); - if let Some(local) = self.as_local_mut() { - let mut adapters = BTreeMap::default(); - let get_adapter = { - let languages = local.languages.clone(); - let environment = local.environment.clone(); - let weak = local.weak.clone(); - let worktree_store = local.worktree_store.clone(); - let http_client = local.http_client.clone(); - let fs = local.fs.clone(); - move |worktree_id, cx: &mut App| { - let worktree = worktree_store.read(cx).worktree_for_id(worktree_id, cx)?; - Some(LocalLspAdapterDelegate::new( - languages.clone(), - &environment, - weak.clone(), - &worktree, - http_client.clone(), - fs.clone(), - cx, - )) - } - }; + let Some(local) = self.as_local_mut() else { + return; + }; + let mut adapters = BTreeMap::default(); + let get_adapter = { + let languages = local.languages.clone(); + let environment = local.environment.clone(); + let weak = local.weak.clone(); + let worktree_store = local.worktree_store.clone(); + let http_client = local.http_client.clone(); + let fs = local.fs.clone(); + move |worktree_id, cx: &mut App| { + let worktree = worktree_store.read(cx).worktree_for_id(worktree_id, cx)?; + Some(LocalLspAdapterDelegate::new( + languages.clone(), + &environment, + weak.clone(), + &worktree, + http_client.clone(), + fs.clone(), + cx, + )) + } + }; - let mut messages_to_report = Vec::new(); - let to_stop = local.lsp_tree.clone().update(cx, |lsp_tree, cx| { - let mut rebase = lsp_tree.rebase(); - for buffer_handle in buffer_store.read(cx).buffers().sorted_by_key(|buffer| { - Reverse( - File::from_dyn(buffer.read(cx).file()) - .map(|file| file.worktree.read(cx).is_visible()), - ) - }) { - let buffer = buffer_handle.read(cx); - let buffer_id = buffer.remote_id(); - if !local.registered_buffers.contains_key(&buffer_id) { - continue; - } - if let Some((file, language)) = File::from_dyn(buffer.file()) - .cloned() - .zip(buffer.language().map(|l| l.name())) + let mut messages_to_report = Vec::new(); + let (new_tree, to_stop) = { + let mut rebase = local.lsp_tree.rebase(); + let buffers = buffer_store + .read(cx) + .buffers() + .filter_map(|buffer| { + let raw_buffer = buffer.read(cx); + if !local + .registered_buffers + .contains_key(&raw_buffer.remote_id()) { - let worktree_id = file.worktree_id(cx); - let Some(worktree) = local - .worktree_store - .read(cx) - .worktree_for_id(worktree_id, cx) - else { - continue; - }; + return None; + } + let file = File::from_dyn(raw_buffer.file()).cloned()?; + let language = raw_buffer.language().cloned()?; + Some((file, language, raw_buffer.remote_id())) + }) + .sorted_by_key(|(file, _, _)| Reverse(file.worktree.read(cx).is_visible())); - let Some((reused, delegate, nodes)) = local - .reuse_existing_language_server( - rebase.server_tree(), - &worktree, - &language, - cx, - ) - .map(|(delegate, servers)| (true, 
delegate, servers)) - .or_else(|| { - let lsp_delegate = adapters - .entry(worktree_id) - .or_insert_with(|| get_adapter(worktree_id, cx)) - .clone()?; - let delegate = Arc::new(ManifestQueryDelegate::new( - worktree.read(cx).snapshot(), - )); - let path = file - .path() - .parent() - .map(Arc::from) - .unwrap_or_else(|| file.path().clone()); - let worktree_path = ProjectPath { worktree_id, path }; - - let nodes = rebase.get( - worktree_path, - AdapterQuery::Language(&language), - delegate.clone(), - cx, - ); + for (file, language, buffer_id) in buffers { + let worktree_id = file.worktree_id(cx); + let Some(worktree) = local + .worktree_store + .read(cx) + .worktree_for_id(worktree_id, cx) + else { + continue; + }; - Some((false, lsp_delegate, nodes.collect())) - }) - else { - continue; - }; + if let Some((_, apply)) = local.reuse_existing_language_server( + rebase.server_tree(), + &worktree, + &language.name(), + cx, + ) { + (apply)(rebase.server_tree()); + } else if let Some(lsp_delegate) = adapters + .entry(worktree_id) + .or_insert_with(|| get_adapter(worktree_id, cx)) + .clone() + { + let delegate = + Arc::new(ManifestQueryDelegate::new(worktree.read(cx).snapshot())); + let path = file + .path() + .parent() + .map(Arc::from) + .unwrap_or_else(|| file.path().clone()); + let worktree_path = ProjectPath { worktree_id, path }; + let abs_path = file.abs_path(cx); + let worktree_root = worktree.read(cx).abs_path(); + let nodes = rebase + .walk( + worktree_path, + language.name(), + language.manifest(), + delegate.clone(), + cx, + ) + .collect::>(); - let abs_path = file.abs_path(cx); - for node in nodes { - if !reused { - let server_id = node.server_id_or_init( - |LaunchDisposition { - server_name, - - path, - settings, - }| - { - let uri = Url::from_file_path( - worktree.read(cx).abs_path().join(&path.path), - ); - let key = (worktree_id, server_name.clone()); - local.language_server_ids.remove(&key); - - let adapter = local - .languages - .lsp_adapters(&language) - .into_iter() - .find(|adapter| &adapter.name() == server_name) - .expect("To find LSP adapter"); - let server_id = local.start_language_server( - &worktree, - delegate.clone(), - adapter, - settings, - cx, - ); - if let Some(state) = - local.language_servers.get(&server_id) - { - if let Ok(uri) = uri { - state.add_workspace_folder(uri); - }; - } - server_id - } - ); + for node in nodes { + let server_id = node.server_id_or_init(|disposition| { + let path = &disposition.path; + let uri = Url::from_file_path(worktree_root.join(&path.path)); + let key = LanguageServerSeed { + worktree_id, + name: disposition.server_name.clone(), + settings: disposition.settings.clone(), + toolchain: local.toolchain_store.read(cx).active_toolchain( + path.worktree_id, + &path.path, + language.name(), + ), + }; + local.language_server_ids.remove(&key); - if let Some(language_server_id) = server_id { - messages_to_report.push(LspStoreEvent::LanguageServerUpdate { - language_server_id, - name: node.name(), - message: - proto::update_language_server::Variant::RegisteredForBuffer( - proto::RegisteredForBuffer { - buffer_abs_path: abs_path.to_string_lossy().to_string(), - buffer_id: buffer_id.to_proto(), - }, - ), - }); - } - } + let server_id = local.get_or_insert_language_server( + &worktree, + lsp_delegate.clone(), + disposition, + &language.name(), + cx, + ); + if let Some(state) = local.language_servers.get(&server_id) + && let Ok(uri) = uri + { + state.add_workspace_folder(uri); + }; + server_id + }); + + if let Some(language_server_id) = server_id 
{ + messages_to_report.push(LspStoreEvent::LanguageServerUpdate { + language_server_id, + name: node.name(), + message: + proto::update_language_server::Variant::RegisteredForBuffer( + proto::RegisteredForBuffer { + buffer_abs_path: abs_path.to_string_lossy().to_string(), + buffer_id: buffer_id.to_proto(), + }, + ), + }); } } + } else { + continue; } - rebase.finish() - }); - for message in messages_to_report { - cx.emit(message); - } - for (id, _) in to_stop { - self.stop_local_language_server(id, cx).detach(); } + rebase.finish() + }; + for message in messages_to_report { + cx.emit(message); + } + local.lsp_tree = new_tree; + for (id, _) in to_stop { + self.stop_local_language_server(id, cx).detach(); } } @@ -4879,7 +4774,7 @@ impl LspStore { .await }) } else if self.mode.is_local() { - let Some((lsp_adapter, lang_server)) = buffer_handle.update(cx, |buffer, cx| { + let Some((_, lang_server)) = buffer_handle.update(cx, |buffer, cx| { self.language_server_for_local_buffer(buffer, action.server_id, cx) .map(|(adapter, server)| (adapter.clone(), server.clone())) }) else { @@ -4889,19 +4784,18 @@ impl LspStore { LocalLspStore::try_resolve_code_action(&lang_server, &mut action) .await .context("resolving a code action")?; - if let Some(edit) = action.lsp_action.edit() { - if edit.changes.is_some() || edit.document_changes.is_some() { + if let Some(edit) = action.lsp_action.edit() + && (edit.changes.is_some() || edit.document_changes.is_some()) { return LocalLspStore::deserialize_workspace_edit( this.upgrade().context("no app present")?, edit.clone(), push_to_history, - lsp_adapter.clone(), + lang_server.clone(), cx, ) .await; } - } if let Some(command) = action.lsp_action.command() { let server_capabilities = lang_server.capabilities(); @@ -4953,7 +4847,7 @@ impl LspStore { push_to_history: bool, cx: &mut Context, ) -> Task> { - if let Some(_) = self.as_local() { + if self.as_local().is_some() { cx.spawn(async move |lsp_store, cx| { let buffers = buffers.into_iter().collect::>(); let result = LocalLspStore::execute_code_action_kind_locally( @@ -5343,154 +5237,130 @@ impl LspStore { pub fn definitions( &mut self, - buffer_handle: &Entity, + buffer: &Entity, position: PointUtf16, cx: &mut Context, - ) -> Task>> { + ) -> Task>>> { if let Some((upstream_client, project_id)) = self.upstream_client() { let request = GetDefinitions { position }; - if !self.is_capable_for_proto_request(buffer_handle, &request, cx) { - return Task::ready(Ok(Vec::new())); + if !self.is_capable_for_proto_request(buffer, &request, cx) { + return Task::ready(Ok(None)); } - let request_task = upstream_client.request(proto::MultiLspQuery { - buffer_id: buffer_handle.read(cx).remote_id().into(), - version: serialize_version(&buffer_handle.read(cx).version()), + let request_task = upstream_client.request_lsp( project_id, - strategy: Some(proto::multi_lsp_query::Strategy::All( - proto::AllLanguageServers {}, - )), - request: Some(proto::multi_lsp_query::Request::GetDefinition( - request.to_proto(project_id, buffer_handle.read(cx)), - )), - }); - let buffer = buffer_handle.clone(); + LSP_REQUEST_TIMEOUT, + cx.background_executor().clone(), + request.to_proto(project_id, buffer.read(cx)), + ); + let buffer = buffer.clone(); cx.spawn(async move |weak_project, cx| { let Some(project) = weak_project.upgrade() else { - return Ok(Vec::new()); + return Ok(None); }; - let responses = request_task.await?.responses; - let actions = join_all( - responses - .into_iter() - .filter_map(|lsp_response| match lsp_response.response? 
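From `definitions` onward, the query methods change their return type from `Task<Result<Vec<_>>>` to `Task<Result<Option<Vec<_>>>>`; judging by the early `Task::ready(Ok(None))` returns, `None` appears to mean "no capable server, nothing was asked", while `Some(vec)` carries actual (possibly empty) results. A small sketch of that convention with plain types; the function and parameter names are illustrative only:

    // Hypothetical query: `None` = request never issued (no capable server),
    // `Some(hits)` = servers answered, possibly with an empty list.
    fn query_definitions(server_available: bool, raw_hits: Vec<u32>) -> Option<Vec<u32>> {
        if !server_available {
            return None;
        }
        Some(raw_hits)
    }

    fn describe(result: Option<Vec<u32>>) -> &'static str {
        match result {
            None => "no language server could answer the request",
            Some(hits) if hits.is_empty() => "a server answered, but found nothing",
            Some(_) => "a server answered with results",
        }
    }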
{ - proto::lsp_response::Response::GetDefinitionResponse(response) => { - Some(response) - } - unexpected => { - debug_panic!("Unexpected response: {unexpected:?}"); - None - } - }) - .map(|definitions_response| { - GetDefinitions { position }.response_from_proto( - definitions_response, - project.clone(), - buffer.clone(), - cx.clone(), - ) - }), - ) + let Some(responses) = request_task.await? else { + return Ok(None); + }; + let actions = join_all(responses.payload.into_iter().map(|response| { + GetDefinitions { position }.response_from_proto( + response.response, + project.clone(), + buffer.clone(), + cx.clone(), + ) + })) .await; - Ok(actions - .into_iter() - .collect::>>>()? - .into_iter() - .flatten() - .dedup() - .collect()) + Ok(Some( + actions + .into_iter() + .collect::>>>()? + .into_iter() + .flatten() + .dedup() + .collect(), + )) }) } else { let definitions_task = self.request_multiple_lsp_locally( - buffer_handle, + buffer, Some(position), GetDefinitions { position }, cx, ); cx.background_spawn(async move { - Ok(definitions_task - .await - .into_iter() - .flat_map(|(_, definitions)| definitions) - .dedup() - .collect()) + Ok(Some( + definitions_task + .await + .into_iter() + .flat_map(|(_, definitions)| definitions) + .dedup() + .collect(), + )) }) } } pub fn declarations( &mut self, - buffer_handle: &Entity, + buffer: &Entity, position: PointUtf16, cx: &mut Context, - ) -> Task>> { + ) -> Task>>> { if let Some((upstream_client, project_id)) = self.upstream_client() { let request = GetDeclarations { position }; - if !self.is_capable_for_proto_request(buffer_handle, &request, cx) { - return Task::ready(Ok(Vec::new())); + if !self.is_capable_for_proto_request(buffer, &request, cx) { + return Task::ready(Ok(None)); } - let request_task = upstream_client.request(proto::MultiLspQuery { - buffer_id: buffer_handle.read(cx).remote_id().into(), - version: serialize_version(&buffer_handle.read(cx).version()), + let request_task = upstream_client.request_lsp( project_id, - strategy: Some(proto::multi_lsp_query::Strategy::All( - proto::AllLanguageServers {}, - )), - request: Some(proto::multi_lsp_query::Request::GetDeclaration( - request.to_proto(project_id, buffer_handle.read(cx)), - )), - }); - let buffer = buffer_handle.clone(); + LSP_REQUEST_TIMEOUT, + cx.background_executor().clone(), + request.to_proto(project_id, buffer.read(cx)), + ); + let buffer = buffer.clone(); cx.spawn(async move |weak_project, cx| { let Some(project) = weak_project.upgrade() else { - return Ok(Vec::new()); + return Ok(None); }; - let responses = request_task.await?.responses; - let actions = join_all( - responses - .into_iter() - .filter_map(|lsp_response| match lsp_response.response? { - proto::lsp_response::Response::GetDeclarationResponse(response) => { - Some(response) - } - unexpected => { - debug_panic!("Unexpected response: {unexpected:?}"); - None - } - }) - .map(|declarations_response| { - GetDeclarations { position }.response_from_proto( - declarations_response, - project.clone(), - buffer.clone(), - cx.clone(), - ) - }), - ) + let Some(responses) = request_task.await? else { + return Ok(None); + }; + let actions = join_all(responses.payload.into_iter().map(|response| { + GetDeclarations { position }.response_from_proto( + response.response, + project.clone(), + buffer.clone(), + cx.clone(), + ) + })) .await; - Ok(actions - .into_iter() - .collect::>>>()? - .into_iter() - .flatten() - .dedup() - .collect()) + Ok(Some( + actions + .into_iter() + .collect::>>>()? 
+ .into_iter() + .flatten() + .dedup() + .collect(), + )) }) } else { let declarations_task = self.request_multiple_lsp_locally( - buffer_handle, + buffer, Some(position), GetDeclarations { position }, cx, ); cx.background_spawn(async move { - Ok(declarations_task - .await - .into_iter() - .flat_map(|(_, declarations)| declarations) - .dedup() - .collect()) + Ok(Some( + declarations_task + .await + .into_iter() + .flat_map(|(_, declarations)| declarations) + .dedup() + .collect(), + )) }) } } @@ -5500,59 +5370,45 @@ impl LspStore { buffer: &Entity, position: PointUtf16, cx: &mut Context, - ) -> Task>> { + ) -> Task>>> { if let Some((upstream_client, project_id)) = self.upstream_client() { let request = GetTypeDefinitions { position }; - if !self.is_capable_for_proto_request(&buffer, &request, cx) { - return Task::ready(Ok(Vec::new())); + if !self.is_capable_for_proto_request(buffer, &request, cx) { + return Task::ready(Ok(None)); } - let request_task = upstream_client.request(proto::MultiLspQuery { - buffer_id: buffer.read(cx).remote_id().into(), - version: serialize_version(&buffer.read(cx).version()), + let request_task = upstream_client.request_lsp( project_id, - strategy: Some(proto::multi_lsp_query::Strategy::All( - proto::AllLanguageServers {}, - )), - request: Some(proto::multi_lsp_query::Request::GetTypeDefinition( - request.to_proto(project_id, buffer.read(cx)), - )), - }); + LSP_REQUEST_TIMEOUT, + cx.background_executor().clone(), + request.to_proto(project_id, buffer.read(cx)), + ); let buffer = buffer.clone(); cx.spawn(async move |weak_project, cx| { let Some(project) = weak_project.upgrade() else { - return Ok(Vec::new()); + return Ok(None); }; - let responses = request_task.await?.responses; - let actions = join_all( - responses - .into_iter() - .filter_map(|lsp_response| match lsp_response.response? { - proto::lsp_response::Response::GetTypeDefinitionResponse(response) => { - Some(response) - } - unexpected => { - debug_panic!("Unexpected response: {unexpected:?}"); - None - } - }) - .map(|type_definitions_response| { - GetTypeDefinitions { position }.response_from_proto( - type_definitions_response, - project.clone(), - buffer.clone(), - cx.clone(), - ) - }), - ) + let Some(responses) = request_task.await? else { + return Ok(None); + }; + let actions = join_all(responses.payload.into_iter().map(|response| { + GetTypeDefinitions { position }.response_from_proto( + response.response, + project.clone(), + buffer.clone(), + cx.clone(), + ) + })) .await; - Ok(actions - .into_iter() - .collect::>>>()? - .into_iter() - .flatten() - .dedup() - .collect()) + Ok(Some( + actions + .into_iter() + .collect::>>>()? 
+ .into_iter() + .flatten() + .dedup() + .collect(), + )) }) } else { let type_definitions_task = self.request_multiple_lsp_locally( @@ -5562,12 +5418,14 @@ impl LspStore { cx, ); cx.background_spawn(async move { - Ok(type_definitions_task - .await - .into_iter() - .flat_map(|(_, type_definitions)| type_definitions) - .dedup() - .collect()) + Ok(Some( + type_definitions_task + .await + .into_iter() + .flat_map(|(_, type_definitions)| type_definitions) + .dedup() + .collect(), + )) }) } } @@ -5577,59 +5435,45 @@ impl LspStore { buffer: &Entity, position: PointUtf16, cx: &mut Context, - ) -> Task>> { + ) -> Task>>> { if let Some((upstream_client, project_id)) = self.upstream_client() { let request = GetImplementations { position }; if !self.is_capable_for_proto_request(buffer, &request, cx) { - return Task::ready(Ok(Vec::new())); + return Task::ready(Ok(None)); } - let request_task = upstream_client.request(proto::MultiLspQuery { - buffer_id: buffer.read(cx).remote_id().into(), - version: serialize_version(&buffer.read(cx).version()), + let request_task = upstream_client.request_lsp( project_id, - strategy: Some(proto::multi_lsp_query::Strategy::All( - proto::AllLanguageServers {}, - )), - request: Some(proto::multi_lsp_query::Request::GetImplementation( - request.to_proto(project_id, buffer.read(cx)), - )), - }); + LSP_REQUEST_TIMEOUT, + cx.background_executor().clone(), + request.to_proto(project_id, buffer.read(cx)), + ); let buffer = buffer.clone(); cx.spawn(async move |weak_project, cx| { let Some(project) = weak_project.upgrade() else { - return Ok(Vec::new()); + return Ok(None); }; - let responses = request_task.await?.responses; - let actions = join_all( - responses - .into_iter() - .filter_map(|lsp_response| match lsp_response.response? { - proto::lsp_response::Response::GetImplementationResponse(response) => { - Some(response) - } - unexpected => { - debug_panic!("Unexpected response: {unexpected:?}"); - None - } - }) - .map(|implementations_response| { - GetImplementations { position }.response_from_proto( - implementations_response, - project.clone(), - buffer.clone(), - cx.clone(), - ) - }), - ) + let Some(responses) = request_task.await? else { + return Ok(None); + }; + let actions = join_all(responses.payload.into_iter().map(|response| { + GetImplementations { position }.response_from_proto( + response.response, + project.clone(), + buffer.clone(), + cx.clone(), + ) + })) .await; - Ok(actions - .into_iter() - .collect::>>>()? - .into_iter() - .flatten() - .dedup() - .collect()) + Ok(Some( + actions + .into_iter() + .collect::>>>()? 
+ .into_iter() + .flatten() + .dedup() + .collect(), + )) }) } else { let implementations_task = self.request_multiple_lsp_locally( @@ -5639,12 +5483,14 @@ impl LspStore { cx, ); cx.background_spawn(async move { - Ok(implementations_task - .await - .into_iter() - .flat_map(|(_, implementations)| implementations) - .dedup() - .collect()) + Ok(Some( + implementations_task + .await + .into_iter() + .flat_map(|(_, implementations)| implementations) + .dedup() + .collect(), + )) }) } } @@ -5654,59 +5500,44 @@ impl LspStore { buffer: &Entity, position: PointUtf16, cx: &mut Context, - ) -> Task>> { + ) -> Task>>> { if let Some((upstream_client, project_id)) = self.upstream_client() { let request = GetReferences { position }; - if !self.is_capable_for_proto_request(&buffer, &request, cx) { - return Task::ready(Ok(Vec::new())); + if !self.is_capable_for_proto_request(buffer, &request, cx) { + return Task::ready(Ok(None)); } - let request_task = upstream_client.request(proto::MultiLspQuery { - buffer_id: buffer.read(cx).remote_id().into(), - version: serialize_version(&buffer.read(cx).version()), + + let request_task = upstream_client.request_lsp( project_id, - strategy: Some(proto::multi_lsp_query::Strategy::All( - proto::AllLanguageServers {}, - )), - request: Some(proto::multi_lsp_query::Request::GetReferences( - request.to_proto(project_id, buffer.read(cx)), - )), - }); + LSP_REQUEST_TIMEOUT, + cx.background_executor().clone(), + request.to_proto(project_id, buffer.read(cx)), + ); let buffer = buffer.clone(); cx.spawn(async move |weak_project, cx| { let Some(project) = weak_project.upgrade() else { - return Ok(Vec::new()); + return Ok(None); + }; + let Some(responses) = request_task.await? else { + return Ok(None); }; - let responses = request_task.await?.responses; - let actions = join_all( - responses - .into_iter() - .filter_map(|lsp_response| match lsp_response.response? { - proto::lsp_response::Response::GetReferencesResponse(response) => { - Some(response) - } - unexpected => { - debug_panic!("Unexpected response: {unexpected:?}"); - None - } - }) - .map(|references_response| { - GetReferences { position }.response_from_proto( - references_response, - project.clone(), - buffer.clone(), - cx.clone(), - ) - }), - ) - .await; - Ok(actions - .into_iter() - .collect::>>>()? - .into_iter() - .flatten() - .dedup() - .collect()) + let locations = join_all(responses.payload.into_iter().map(|lsp_response| { + GetReferences { position }.response_from_proto( + lsp_response.response, + project.clone(), + buffer.clone(), + cx.clone(), + ) + })) + .await + .into_iter() + .collect::>>>()? 
+ .into_iter() + .flatten() + .dedup() + .collect(); + Ok(Some(locations)) }) } else { let references_task = self.request_multiple_lsp_locally( @@ -5716,12 +5547,14 @@ impl LspStore { cx, ); cx.background_spawn(async move { - Ok(references_task - .await - .into_iter() - .flat_map(|(_, references)| references) - .dedup() - .collect()) + Ok(Some( + references_task + .await + .into_iter() + .flat_map(|(_, references)| references) + .dedup() + .collect(), + )) }) } } @@ -5732,82 +5565,67 @@ impl LspStore { range: Range, kinds: Option>, cx: &mut Context, - ) -> Task>> { + ) -> Task>>> { if let Some((upstream_client, project_id)) = self.upstream_client() { let request = GetCodeActions { range: range.clone(), kinds: kinds.clone(), }; if !self.is_capable_for_proto_request(buffer, &request, cx) { - return Task::ready(Ok(Vec::new())); + return Task::ready(Ok(None)); } - let request_task = upstream_client.request(proto::MultiLspQuery { - buffer_id: buffer.read(cx).remote_id().into(), - version: serialize_version(&buffer.read(cx).version()), + let request_task = upstream_client.request_lsp( project_id, - strategy: Some(proto::multi_lsp_query::Strategy::All( - proto::AllLanguageServers {}, - )), - request: Some(proto::multi_lsp_query::Request::GetCodeActions( - request.to_proto(project_id, buffer.read(cx)), - )), - }); + LSP_REQUEST_TIMEOUT, + cx.background_executor().clone(), + request.to_proto(project_id, buffer.read(cx)), + ); let buffer = buffer.clone(); cx.spawn(async move |weak_project, cx| { let Some(project) = weak_project.upgrade() else { - return Ok(Vec::new()); + return Ok(None); }; - let responses = request_task.await?.responses; - let actions = join_all( - responses - .into_iter() - .filter_map(|lsp_response| match lsp_response.response? { - proto::lsp_response::Response::GetCodeActionsResponse(response) => { - Some(response) - } - unexpected => { - debug_panic!("Unexpected response: {unexpected:?}"); - None - } - }) - .map(|code_actions_response| { - GetCodeActions { - range: range.clone(), - kinds: kinds.clone(), - } - .response_from_proto( - code_actions_response, - project.clone(), - buffer.clone(), - cx.clone(), - ) - }), - ) + let Some(responses) = request_task.await? else { + return Ok(None); + }; + let actions = join_all(responses.payload.into_iter().map(|response| { + GetCodeActions { + range: range.clone(), + kinds: kinds.clone(), + } + .response_from_proto( + response.response, + project.clone(), + buffer.clone(), + cx.clone(), + ) + })) .await; - Ok(actions - .into_iter() - .collect::>>>()? - .into_iter() - .flatten() - .collect()) + Ok(Some( + actions + .into_iter() + .collect::>>>()? 
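Both the remote and the local branches above merge per-server result sets with `flatten` followed by `dedup`. `Itertools::dedup` removes only runs of equal, adjacent items, so it collapses duplicates that end up next to each other after flattening rather than performing a full de-duplication. A standalone sketch, assuming `itertools` as a dependency:

    use itertools::Itertools;

    fn merge_results(per_server: Vec<Vec<u32>>) -> Vec<u32> {
        per_server
            .into_iter()
            .flatten()
            // Drops only consecutive duplicates; a global de-duplication
            // would need `unique()` or sorting first.
            .dedup()
            .collect()
    }

    // merge_results(vec![vec![1, 1, 2], vec![2, 3]]) == vec![1, 2, 3]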
+ .into_iter() + .flatten() + .collect(), + )) }) } else { let all_actions_task = self.request_multiple_lsp_locally( buffer, Some(range.start), - GetCodeActions { - range: range.clone(), - kinds: kinds.clone(), - }, + GetCodeActions { range, kinds }, cx, ); cx.background_spawn(async move { - Ok(all_actions_task - .await - .into_iter() - .flat_map(|(_, actions)| actions) - .collect()) + Ok(Some( + all_actions_task + .await + .into_iter() + .flat_map(|(_, actions)| actions) + .collect(), + )) }) } } @@ -5820,28 +5638,30 @@ impl LspStore { let version_queried_for = buffer.read(cx).version(); let buffer_id = buffer.read(cx).remote_id(); - if let Some(cached_data) = self.lsp_code_lens.get(&buffer_id) { - if !version_queried_for.changed_since(&cached_data.lens_for_version) { - let has_different_servers = self.as_local().is_some_and(|local| { - local - .buffers_opened_in_servers - .get(&buffer_id) - .cloned() - .unwrap_or_default() - != cached_data.lens.keys().copied().collect() - }); - if !has_different_servers { - return Task::ready(Ok(cached_data.lens.values().flatten().cloned().collect())) - .shared(); - } + if let Some(cached_data) = self.lsp_code_lens.get(&buffer_id) + && !version_queried_for.changed_since(&cached_data.lens_for_version) + { + let has_different_servers = self.as_local().is_some_and(|local| { + local + .buffers_opened_in_servers + .get(&buffer_id) + .cloned() + .unwrap_or_default() + != cached_data.lens.keys().copied().collect() + }); + if !has_different_servers { + return Task::ready(Ok(Some( + cached_data.lens.values().flatten().cloned().collect(), + ))) + .shared(); } } let lsp_data = self.lsp_code_lens.entry(buffer_id).or_default(); - if let Some((updating_for, running_update)) = &lsp_data.update { - if !version_queried_for.changed_since(&updating_for) { - return running_update.clone(); - } + if let Some((updating_for, running_update)) = &lsp_data.update + && !version_queried_for.changed_since(updating_for) + { + return running_update.clone(); } let buffer = buffer.clone(); let query_version_queried_for = version_queried_for.clone(); @@ -5871,17 +5691,19 @@ impl LspStore { lsp_store .update(cx, |lsp_store, _| { let lsp_data = lsp_store.lsp_code_lens.entry(buffer_id).or_default(); - if lsp_data.lens_for_version == query_version_queried_for { - lsp_data.lens.extend(fetched_lens.clone()); - } else if !lsp_data - .lens_for_version - .changed_since(&query_version_queried_for) - { - lsp_data.lens_for_version = query_version_queried_for; - lsp_data.lens = fetched_lens.clone(); + if let Some(fetched_lens) = fetched_lens { + if lsp_data.lens_for_version == query_version_queried_for { + lsp_data.lens.extend(fetched_lens); + } else if !lsp_data + .lens_for_version + .changed_since(&query_version_queried_for) + { + lsp_data.lens_for_version = query_version_queried_for; + lsp_data.lens = fetched_lens; + } } lsp_data.update = None; - lsp_data.lens.values().flatten().cloned().collect() + Some(lsp_data.lens.values().flatten().cloned().collect()) }) .map_err(Arc::new) }) @@ -5894,64 +5716,40 @@ impl LspStore { &mut self, buffer: &Entity, cx: &mut Context, - ) -> Task>>> { + ) -> Task>>>> { if let Some((upstream_client, project_id)) = self.upstream_client() { let request = GetCodeLens; if !self.is_capable_for_proto_request(buffer, &request, cx) { - return Task::ready(Ok(HashMap::default())); + return Task::ready(Ok(None)); } - let request_task = upstream_client.request(proto::MultiLspQuery { - buffer_id: buffer.read(cx).remote_id().into(), - version: 
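The code-lens path above keeps a per-buffer cache keyed by the buffer version and reuses an in-flight update when the queried version has not advanced past the one being fetched. A simplified, synchronous sketch of that gating logic; a monotonically increasing `u64` stands in for the version clock, and a marker for the shared in-flight future:

    #[derive(Default)]
    struct LensCache {
        cached_for_version: u64,
        cached: Vec<String>,
        /// Version an in-flight refresh was started for, if any.
        update_in_flight_for: Option<u64>,
    }

    enum Action {
        ServeCached(Vec<String>),
        AwaitInFlight,
        StartNewFetch,
    }

    fn plan(cache: &LensCache, queried_version: u64) -> Action {
        // Cache still valid: the buffer has not changed since it was filled.
        if queried_version <= cache.cached_for_version {
            return Action::ServeCached(cache.cached.clone());
        }
        // A refresh covering this version is already running: share it.
        if let Some(updating_for) = cache.update_in_flight_for {
            if queried_version <= updating_for {
                return Action::AwaitInFlight;
            }
        }
        Action::StartNewFetch
    }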
serialize_version(&buffer.read(cx).version()), + let request_task = upstream_client.request_lsp( project_id, - strategy: Some(proto::multi_lsp_query::Strategy::All( - proto::AllLanguageServers {}, - )), - request: Some(proto::multi_lsp_query::Request::GetCodeLens( - request.to_proto(project_id, buffer.read(cx)), - )), - }); + LSP_REQUEST_TIMEOUT, + cx.background_executor().clone(), + request.to_proto(project_id, buffer.read(cx)), + ); let buffer = buffer.clone(); cx.spawn(async move |weak_lsp_store, cx| { let Some(lsp_store) = weak_lsp_store.upgrade() else { - return Ok(HashMap::default()); + return Ok(None); }; - let responses = request_task.await?.responses; - let code_lens_actions = join_all( - responses - .into_iter() - .filter_map(|lsp_response| { - let response = match lsp_response.response? { - proto::lsp_response::Response::GetCodeLensResponse(response) => { - Some(response) - } - unexpected => { - debug_panic!("Unexpected response: {unexpected:?}"); - None - } - }?; - let server_id = LanguageServerId::from_proto(lsp_response.server_id); - Some((server_id, response)) - }) - .map(|(server_id, code_lens_response)| { - let lsp_store = lsp_store.clone(); - let buffer = buffer.clone(); - let cx = cx.clone(); - async move { - ( - server_id, - GetCodeLens - .response_from_proto( - code_lens_response, - lsp_store, - buffer, - cx, - ) - .await, - ) - } - }), - ) + let Some(responses) = request_task.await? else { + return Ok(None); + }; + + let code_lens_actions = join_all(responses.payload.into_iter().map(|response| { + let lsp_store = lsp_store.clone(); + let buffer = buffer.clone(); + let cx = cx.clone(); + async move { + ( + LanguageServerId::from_proto(response.server_id), + GetCodeLens + .response_from_proto(response.response, lsp_store, buffer, cx) + .await, + ) + } + })) .await; let mut has_errors = false; @@ -5970,14 +5768,14 @@ impl LspStore { !has_errors || !code_lens_actions.is_empty(), "Failed to fetch code lens" ); - Ok(code_lens_actions) + Ok(Some(code_lens_actions)) }) } else { let code_lens_actions_task = self.request_multiple_lsp_locally(buffer, None::, GetCodeLens, cx); - cx.background_spawn( - async move { Ok(code_lens_actions_task.await.into_iter().collect()) }, - ) + cx.background_spawn(async move { + Ok(Some(code_lens_actions_task.await.into_iter().collect())) + }) } } @@ -6456,11 +6254,11 @@ impl LspStore { .old_replace_start .and_then(deserialize_anchor) .zip(response.old_replace_end.and_then(deserialize_anchor)); - if let Some((old_replace_start, old_replace_end)) = replace_range { - if !response.new_text.is_empty() { - completion.new_text = response.new_text; - completion.replace_range = old_replace_start..old_replace_end; - } + if let Some((old_replace_start, old_replace_end)) = replace_range + && !response.new_text.is_empty() + { + completion.new_text = response.new_text; + completion.replace_range = old_replace_start..old_replace_end; } Ok(()) @@ -6593,48 +6391,23 @@ impl LspStore { let buffer_id = buffer.read(cx).remote_id(); if let Some((client, upstream_project_id)) = self.upstream_client() { - if !self.is_capable_for_proto_request( - &buffer, - &GetDocumentDiagnostics { - previous_result_id: None, - }, - cx, - ) { + let request = GetDocumentDiagnostics { + previous_result_id: None, + }; + if !self.is_capable_for_proto_request(&buffer, &request, cx) { return Task::ready(Ok(None)); } - let request_task = client.request(proto::MultiLspQuery { - buffer_id: buffer_id.to_proto(), - version: serialize_version(&buffer.read(cx).version()), - project_id: 
upstream_project_id, - strategy: Some(proto::multi_lsp_query::Strategy::All( - proto::AllLanguageServers {}, - )), - request: Some(proto::multi_lsp_query::Request::GetDocumentDiagnostics( - proto::GetDocumentDiagnostics { - project_id: upstream_project_id, - buffer_id: buffer_id.to_proto(), - version: serialize_version(&buffer.read(cx).version()), - }, - )), - }); + let request_task = client.request_lsp( + upstream_project_id, + LSP_REQUEST_TIMEOUT, + cx.background_executor().clone(), + request.to_proto(upstream_project_id, buffer.read(cx)), + ); cx.background_spawn(async move { - let _proto_responses = request_task - .await? - .responses - .into_iter() - .filter_map(|lsp_response| match lsp_response.response? { - proto::lsp_response::Response::GetDocumentDiagnosticsResponse(response) => { - Some(response) - } - unexpected => { - debug_panic!("Unexpected response: {unexpected:?}"); - None - } - }) - .collect::>(); // Proto requests cause the diagnostics to be pulled from language server(s) on the local side // and then, buffer state updated with the diagnostics received, which will be later propagated to the client. // Do not attempt to further process the dummy responses here. + let _response = request_task.await?; Ok(None) }) } else { @@ -6835,33 +6608,33 @@ impl LspStore { LspFetchStrategy::UseCache { known_cache_version, } => { - if let Some(cached_data) = self.lsp_document_colors.get(&buffer_id) { - if !version_queried_for.changed_since(&cached_data.colors_for_version) { - let has_different_servers = self.as_local().is_some_and(|local| { - local - .buffers_opened_in_servers - .get(&buffer_id) - .cloned() - .unwrap_or_default() - != cached_data.colors.keys().copied().collect() - }); - if !has_different_servers { - if Some(cached_data.cache_version) == known_cache_version { - return None; - } else { - return Some( - Task::ready(Ok(DocumentColors { - colors: cached_data - .colors - .values() - .flatten() - .cloned() - .collect(), - cache_version: Some(cached_data.cache_version), - })) - .shared(), - ); - } + if let Some(cached_data) = self.lsp_document_colors.get(&buffer_id) + && !version_queried_for.changed_since(&cached_data.colors_for_version) + { + let has_different_servers = self.as_local().is_some_and(|local| { + local + .buffers_opened_in_servers + .get(&buffer_id) + .cloned() + .unwrap_or_default() + != cached_data.colors.keys().copied().collect() + }); + if !has_different_servers { + if Some(cached_data.cache_version) == known_cache_version { + return None; + } else { + return Some( + Task::ready(Ok(DocumentColors { + colors: cached_data + .colors + .values() + .flatten() + .cloned() + .collect(), + cache_version: Some(cached_data.cache_version), + })) + .shared(), + ); } } } @@ -6869,10 +6642,10 @@ impl LspStore { } let lsp_data = self.lsp_document_colors.entry(buffer_id).or_default(); - if let Some((updating_for, running_update)) = &lsp_data.colors_update { - if !version_queried_for.changed_since(&updating_for) { - return Some(running_update.clone()); - } + if let Some((updating_for, running_update)) = &lsp_data.colors_update + && !version_queried_for.changed_since(updating_for) + { + return Some(running_update.clone()); } let query_version_queried_for = version_queried_for.clone(); let new_task = cx @@ -6919,16 +6692,18 @@ impl LspStore { .update(cx, |lsp_store, _| { let lsp_data = lsp_store.lsp_document_colors.entry(buffer_id).or_default(); - if lsp_data.colors_for_version == query_version_queried_for { - lsp_data.colors.extend(fetched_colors.clone()); - 
lsp_data.cache_version += 1; - } else if !lsp_data - .colors_for_version - .changed_since(&query_version_queried_for) - { - lsp_data.colors_for_version = query_version_queried_for; - lsp_data.colors = fetched_colors.clone(); - lsp_data.cache_version += 1; + if let Some(fetched_colors) = fetched_colors { + if lsp_data.colors_for_version == query_version_queried_for { + lsp_data.colors.extend(fetched_colors); + lsp_data.cache_version += 1; + } else if !lsp_data + .colors_for_version + .changed_since(&query_version_queried_for) + { + lsp_data.colors_for_version = query_version_queried_for; + lsp_data.colors = fetched_colors; + lsp_data.cache_version += 1; + } } lsp_data.colors_update = None; let colors = lsp_data @@ -6953,56 +6728,45 @@ impl LspStore { &mut self, buffer: &Entity, cx: &mut Context, - ) -> Task>>> { + ) -> Task>>>> { if let Some((client, project_id)) = self.upstream_client() { let request = GetDocumentColor {}; if !self.is_capable_for_proto_request(buffer, &request, cx) { - return Task::ready(Ok(HashMap::default())); + return Task::ready(Ok(None)); } - let request_task = client.request(proto::MultiLspQuery { + let request_task = client.request_lsp( project_id, - buffer_id: buffer.read(cx).remote_id().to_proto(), - version: serialize_version(&buffer.read(cx).version()), - strategy: Some(proto::multi_lsp_query::Strategy::All( - proto::AllLanguageServers {}, - )), - request: Some(proto::multi_lsp_query::Request::GetDocumentColor( - request.to_proto(project_id, buffer.read(cx)), - )), - }); + LSP_REQUEST_TIMEOUT, + cx.background_executor().clone(), + request.to_proto(project_id, buffer.read(cx)), + ); let buffer = buffer.clone(); - cx.spawn(async move |project, cx| { - let Some(project) = project.upgrade() else { - return Ok(HashMap::default()); + cx.spawn(async move |lsp_store, cx| { + let Some(project) = lsp_store.upgrade() else { + return Ok(None); }; let colors = join_all( request_task .await .log_err() - .map(|response| response.responses) + .flatten() + .map(|response| response.payload) .unwrap_or_default() .into_iter() - .filter_map(|lsp_response| match lsp_response.response? 
{ - proto::lsp_response::Response::GetDocumentColorResponse(response) => { - Some(( - LanguageServerId::from_proto(lsp_response.server_id), - response, - )) - } - unexpected => { - debug_panic!("Unexpected response: {unexpected:?}"); - None - } - }) - .map(|(server_id, color_response)| { + .map(|color_response| { let response = request.response_from_proto( - color_response, + color_response.response, project.clone(), buffer.clone(), cx.clone(), ); - async move { (server_id, response.await.log_err().unwrap_or_default()) } + async move { + ( + LanguageServerId::from_proto(color_response.server_id), + response.await.log_err().unwrap_or_default(), + ) + } }), ) .await @@ -7013,23 +6777,25 @@ impl LspStore { .extend(colors); acc }); - Ok(colors) + Ok(Some(colors)) }) } else { let document_colors_task = self.request_multiple_lsp_locally(buffer, None::, GetDocumentColor, cx); cx.background_spawn(async move { - Ok(document_colors_task - .await - .into_iter() - .fold(HashMap::default(), |mut acc, (server_id, colors)| { - acc.entry(server_id) - .or_insert_with(HashSet::default) - .extend(colors); - acc - }) - .into_iter() - .collect()) + Ok(Some( + document_colors_task + .await + .into_iter() + .fold(HashMap::default(), |mut acc, (server_id, colors)| { + acc.entry(server_id) + .or_insert_with(HashSet::default) + .extend(colors); + acc + }) + .into_iter() + .collect(), + )) }) } } @@ -7039,49 +6805,34 @@ impl LspStore { buffer: &Entity, position: T, cx: &mut Context, - ) -> Task> { + ) -> Task>> { let position = position.to_point_utf16(buffer.read(cx)); if let Some((client, upstream_project_id)) = self.upstream_client() { let request = GetSignatureHelp { position }; if !self.is_capable_for_proto_request(buffer, &request, cx) { - return Task::ready(Vec::new()); + return Task::ready(None); } - let request_task = client.request(proto::MultiLspQuery { - buffer_id: buffer.read(cx).remote_id().into(), - version: serialize_version(&buffer.read(cx).version()), - project_id: upstream_project_id, - strategy: Some(proto::multi_lsp_query::Strategy::All( - proto::AllLanguageServers {}, - )), - request: Some(proto::multi_lsp_query::Request::GetSignatureHelp( - request.to_proto(upstream_project_id, buffer.read(cx)), - )), - }); + let request_task = client.request_lsp( + upstream_project_id, + LSP_REQUEST_TIMEOUT, + cx.background_executor().clone(), + request.to_proto(upstream_project_id, buffer.read(cx)), + ); let buffer = buffer.clone(); cx.spawn(async move |weak_project, cx| { - let Some(project) = weak_project.upgrade() else { - return Vec::new(); - }; - join_all( + let project = weak_project.upgrade()?; + let signatures = join_all( request_task .await .log_err() - .map(|response| response.responses) + .flatten() + .map(|response| response.payload) .unwrap_or_default() .into_iter() - .filter_map(|lsp_response| match lsp_response.response? 
{ - proto::lsp_response::Response::GetSignatureHelpResponse(response) => { - Some(response) - } - unexpected => { - debug_panic!("Unexpected response: {unexpected:?}"); - None - } - }) - .map(|signature_response| { + .map(|response| { let response = GetSignatureHelp { position }.response_from_proto( - signature_response, + response.response, project.clone(), buffer.clone(), cx.clone(), @@ -7092,7 +6843,8 @@ impl LspStore { .await .into_iter() .flatten() - .collect() + .collect(); + Some(signatures) }) } else { let all_actions_task = self.request_multiple_lsp_locally( @@ -7102,11 +6854,13 @@ impl LspStore { cx, ); cx.background_spawn(async move { - all_actions_task - .await - .into_iter() - .flat_map(|(_, actions)| actions) - .collect::>() + Some( + all_actions_task + .await + .into_iter() + .flat_map(|(_, actions)| actions) + .collect::>(), + ) }) } } @@ -7116,47 +6870,32 @@ impl LspStore { buffer: &Entity, position: PointUtf16, cx: &mut Context, - ) -> Task> { + ) -> Task>> { if let Some((client, upstream_project_id)) = self.upstream_client() { let request = GetHover { position }; if !self.is_capable_for_proto_request(buffer, &request, cx) { - return Task::ready(Vec::new()); + return Task::ready(None); } - let request_task = client.request(proto::MultiLspQuery { - buffer_id: buffer.read(cx).remote_id().into(), - version: serialize_version(&buffer.read(cx).version()), - project_id: upstream_project_id, - strategy: Some(proto::multi_lsp_query::Strategy::All( - proto::AllLanguageServers {}, - )), - request: Some(proto::multi_lsp_query::Request::GetHover( - request.to_proto(upstream_project_id, buffer.read(cx)), - )), - }); + let request_task = client.request_lsp( + upstream_project_id, + LSP_REQUEST_TIMEOUT, + cx.background_executor().clone(), + request.to_proto(upstream_project_id, buffer.read(cx)), + ); let buffer = buffer.clone(); cx.spawn(async move |weak_project, cx| { - let Some(project) = weak_project.upgrade() else { - return Vec::new(); - }; - join_all( + let project = weak_project.upgrade()?; + let hovers = join_all( request_task .await .log_err() - .map(|response| response.responses) + .flatten() + .map(|response| response.payload) .unwrap_or_default() .into_iter() - .filter_map(|lsp_response| match lsp_response.response? 
{ - proto::lsp_response::Response::GetHoverResponse(response) => { - Some(response) - } - unexpected => { - debug_panic!("Unexpected response: {unexpected:?}"); - None - } - }) - .map(|hover_response| { + .map(|response| { let response = GetHover { position }.response_from_proto( - hover_response, + response.response, project.clone(), buffer.clone(), cx.clone(), @@ -7173,7 +6912,8 @@ impl LspStore { .await .into_iter() .flatten() - .collect() + .collect(); + Some(hovers) }) } else { let all_actions_task = self.request_multiple_lsp_locally( @@ -7183,11 +6923,13 @@ impl LspStore { cx, ); cx.background_spawn(async move { - all_actions_task - .await - .into_iter() - .filter_map(|(_, hover)| remove_empty_hover_blocks(hover?)) - .collect::>() + Some( + all_actions_task + .await + .into_iter() + .filter_map(|(_, hover)| remove_empty_hover_blocks(hover?)) + .collect::>(), + ) }) } } @@ -7223,11 +6965,11 @@ impl LspStore { let mut requests = Vec::new(); let mut requested_servers = BTreeSet::new(); - 'next_server: for ((worktree_id, _), server_ids) in local.language_server_ids.iter() { + for (seed, state) in local.language_server_ids.iter() { let Some(worktree_handle) = self .worktree_store .read(cx) - .worktree_for_id(*worktree_id, cx) + .worktree_for_id(seed.worktree_id, cx) else { continue; }; @@ -7236,31 +6978,30 @@ impl LspStore { continue; } - let mut servers_to_query = server_ids - .difference(&requested_servers) - .cloned() - .collect::>(); - for server_id in &servers_to_query { - let (lsp_adapter, server) = match local.language_servers.get(server_id) { - Some(LanguageServerState::Running { - adapter, server, .. - }) => (adapter.clone(), server), - - _ => continue 'next_server, + if !requested_servers.insert(state.id) { + continue; + } + + let (lsp_adapter, server) = match local.language_servers.get(&state.id) { + Some(LanguageServerState::Running { + adapter, server, .. 
+ }) => (adapter.clone(), server), + + _ => continue, + }; + let supports_workspace_symbol_request = + match server.capabilities().workspace_symbol_provider { + Some(OneOf::Left(supported)) => supported, + Some(OneOf::Right(_)) => true, + None => false, }; - let supports_workspace_symbol_request = - match server.capabilities().workspace_symbol_provider { - Some(OneOf::Left(supported)) => supported, - Some(OneOf::Right(_)) => true, - None => false, - }; - if !supports_workspace_symbol_request { - continue 'next_server; - } - let worktree_abs_path = worktree.abs_path().clone(); - let worktree_handle = worktree_handle.clone(); - let server_id = server.server_id(); - requests.push( + if !supports_workspace_symbol_request { + continue; + } + let worktree_abs_path = worktree.abs_path().clone(); + let worktree_handle = worktree_handle.clone(); + let server_id = server.server_id(); + requests.push( server .request::( lsp::WorkspaceSymbolParams { @@ -7302,8 +7043,6 @@ impl LspStore { } }), ); - } - requested_servers.append(&mut servers_to_query); } cx.spawn(async move |this, cx| { @@ -7332,7 +7071,7 @@ impl LspStore { worktree = tree; path = rel_path; } else { - worktree = source_worktree.clone(); + worktree = source_worktree; path = relativize_path(&result.worktree_abs_path, &abs_path); } @@ -7401,7 +7140,7 @@ impl LspStore { include_ignored || worktree .entry_for_path(path.as_ref()) - .map_or(false, |entry| !entry.is_ignored) + .is_some_and(|entry| !entry.is_ignored) }) .flat_map(move |(path, summaries)| { summaries.iter().map(move |(server_id, summary)| { @@ -7566,7 +7305,7 @@ impl LspStore { None } - pub(crate) async fn refresh_workspace_configurations( + async fn refresh_workspace_configurations( lsp_store: &WeakEntity, fs: Arc, cx: &mut AsyncApp, @@ -7575,90 +7314,83 @@ impl LspStore { let mut refreshed_servers = HashSet::default(); let servers = lsp_store .update(cx, |lsp_store, cx| { - let toolchain_store = lsp_store.toolchain_store(cx); - let Some(local) = lsp_store.as_local() else { - return Vec::default(); - }; - local + let local = lsp_store.as_local()?; + + let servers = local .language_server_ids .iter() - .flat_map(|((worktree_id, _), server_ids)| { + .filter_map(|(seed, state)| { let worktree = lsp_store .worktree_store .read(cx) - .worktree_for_id(*worktree_id, cx); - let delegate = worktree.map(|worktree| { - LocalLspAdapterDelegate::new( - local.languages.clone(), - &local.environment, - cx.weak_entity(), - &worktree, - local.http_client.clone(), - local.fs.clone(), - cx, - ) - }); + .worktree_for_id(seed.worktree_id, cx); + let delegate: Arc = + worktree.map(|worktree| { + LocalLspAdapterDelegate::new( + local.languages.clone(), + &local.environment, + cx.weak_entity(), + &worktree, + local.http_client.clone(), + local.fs.clone(), + cx, + ) + })?; + let server_id = state.id; - let fs = fs.clone(); - let toolchain_store = toolchain_store.clone(); - server_ids.iter().filter_map(|server_id| { - let delegate = delegate.clone()? as Arc; - let states = local.language_servers.get(server_id)?; - - match states { - LanguageServerState::Starting { .. } => None, - LanguageServerState::Running { - adapter, server, .. 
- } => { - let fs = fs.clone(); - let toolchain_store = toolchain_store.clone(); - let adapter = adapter.clone(); - let server = server.clone(); - refreshed_servers.insert(server.name()); - Some(cx.spawn(async move |_, cx| { - let settings = - LocalLspStore::workspace_configuration_for_adapter( - adapter.adapter.clone(), - fs.as_ref(), - &delegate, - toolchain_store, - cx, - ) - .await - .ok()?; - server - .notify::( - &lsp::DidChangeConfigurationParams { settings }, - ) - .ok()?; - Some(()) - })) - } + let states = local.language_servers.get(&server_id)?; + + match states { + LanguageServerState::Starting { .. } => None, + LanguageServerState::Running { + adapter, server, .. + } => { + let fs = fs.clone(); + + let adapter = adapter.clone(); + let server = server.clone(); + refreshed_servers.insert(server.name()); + let toolchain = seed.toolchain.clone(); + Some(cx.spawn(async move |_, cx| { + let settings = + LocalLspStore::workspace_configuration_for_adapter( + adapter.adapter.clone(), + fs.as_ref(), + &delegate, + toolchain, + cx, + ) + .await + .ok()?; + server + .notify::( + &lsp::DidChangeConfigurationParams { settings }, + ) + .ok()?; + Some(()) + })) } - }).collect::>() + } }) - .collect::>() + .collect::>(); + + Some(servers) }) - .ok()?; + .ok() + .flatten()?; - log::info!("Refreshing workspace configurations for servers {refreshed_servers:?}"); + log::debug!("Refreshing workspace configurations for servers {refreshed_servers:?}"); // TODO this asynchronous job runs concurrently with extension (de)registration and may take enough time for a certain extension // to stop and unregister its language server wrapper. // This is racy : an extension might have already removed all `local.language_servers` state, but here we `.clone()` and hold onto it anyway. // This now causes errors in the logs, we should find a way to remove such servers from the processing everywhere. let _: Vec> = join_all(servers).await; + Some(()) }) .await; } - fn toolchain_store(&self, cx: &App) -> Arc { - if let Some(toolchain_store) = self.toolchain_store.as_ref() { - toolchain_store.read(cx).as_language_toolchain_store() - } else { - Arc::new(EmptyToolchainStore) - } - } fn maintain_workspace_config( fs: Arc, external_refresh_requests: watch::Receiver<()>, @@ -7673,8 +7405,19 @@ impl LspStore { let mut joint_future = futures::stream::select(settings_changed_rx, external_refresh_requests); + // Multiple things can happen when a workspace environment (selected toolchain + settings) change: + // - We might shut down a language server if it's no longer enabled for a given language (and there are no buffers using it otherwise). + // - We might also shut it down when the workspace configuration of all of the users of a given language server converges onto that of the other. + // - In the same vein, we might also decide to start a new language server if the workspace configuration *diverges* from the other. + // - In the easiest case (where we're not wrangling the lifetime of a language server anyhow), if none of the roots of a single language server diverge in their configuration, + // but it is still different to what we had before, we're gonna send out a workspace configuration update. 
cx.spawn(async move |this, cx| { while let Some(()) = joint_future.next().await { + this.update(cx, |this, cx| { + this.refresh_server_tree(cx); + }) + .ok(); + Self::refresh_workspace_configurations(&this, fs.clone(), cx).await; } @@ -7768,12 +7511,19 @@ impl LspStore { pub(crate) fn set_language_server_statuses_from_proto( &mut self, language_servers: Vec, + server_capabilities: Vec, ) { self.language_server_statuses = language_servers .into_iter() - .map(|server| { + .zip(server_capabilities) + .map(|(server, server_capabilities)| { + let server_id = LanguageServerId(server.id as usize); + if let Ok(server_capabilities) = serde_json::from_str(&server_capabilities) { + self.lsp_server_capabilities + .insert(server_id, server_capabilities); + } ( - LanguageServerId(server.id as usize), + server_id, LanguageServerStatus { name: LanguageServerName::from_proto(server.name), pending_work: Default::default(), @@ -7785,47 +7535,6 @@ impl LspStore { .collect(); } - fn register_local_language_server( - &mut self, - worktree: Entity, - language_server_name: LanguageServerName, - language_server_id: LanguageServerId, - cx: &mut App, - ) { - let Some(local) = self.as_local_mut() else { - return; - }; - - let worktree_id = worktree.read(cx).id(); - if worktree.read(cx).is_visible() { - let path = ProjectPath { - worktree_id, - path: Arc::from("".as_ref()), - }; - let delegate = Arc::new(ManifestQueryDelegate::new(worktree.read(cx).snapshot())); - local.lsp_tree.update(cx, |language_server_tree, cx| { - for node in language_server_tree.get( - path, - AdapterQuery::Adapter(&language_server_name), - delegate, - cx, - ) { - node.server_id_or_init(|disposition| { - assert_eq!(disposition.server_name, &language_server_name); - - language_server_id - }); - } - }); - } - - local - .language_server_ids - .entry((worktree_id, language_server_name)) - .or_default() - .insert(language_server_id); - } - #[cfg(test)] pub fn update_diagnostic_entries( &mut self, @@ -7937,7 +7646,7 @@ impl LspStore { } None => { diagnostics_summary = Some(proto::UpdateDiagnosticSummary { - project_id: project_id, + project_id, worktree_id: worktree_id.to_proto(), summary: Some(proto::DiagnosticSummary { path: project_path.path.as_ref().to_proto(), @@ -8055,17 +7764,12 @@ impl LspStore { .await }) } else if let Some(local) = self.as_local() { - let Some(language_server_id) = local - .language_server_ids - .get(&( - symbol.source_worktree_id, - symbol.language_server_name.clone(), - )) - .and_then(|ids| { - ids.contains(&symbol.source_language_server_id) - .then_some(symbol.source_language_server_id) - }) - else { + let is_valid = local.language_server_ids.iter().any(|(seed, state)| { + seed.worktree_id == symbol.source_worktree_id + && state.id == symbol.source_language_server_id + && symbol.language_server_name == seed.name + }); + if !is_valid { return Task::ready(Err(anyhow!( "language server for worktree and language not found" ))); @@ -8089,22 +7793,16 @@ impl LspStore { return Task::ready(Err(anyhow!("invalid symbol path"))); }; - self.open_local_buffer_via_lsp( - symbol_uri, - language_server_id, - symbol.language_server_name.clone(), - cx, - ) + self.open_local_buffer_via_lsp(symbol_uri, symbol.source_language_server_id, cx) } else { Task::ready(Err(anyhow!("no upstream client or local store"))) } } - pub fn open_local_buffer_via_lsp( + pub(crate) fn open_local_buffer_via_lsp( &mut self, mut abs_path: lsp::Url, language_server_id: LanguageServerId, - language_server_name: LanguageServerName, cx: &mut Context, ) -> Task>> { 
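`maintain_workspace_config` above merges settings-changed events with external refresh requests via `futures::stream::select` and reacts to either by rebuilding the server tree and pushing fresh workspace configurations. The stream-merging part in isolation, using only the `futures` crate; the channels and executor stand in for gpui's machinery:

    use futures::{channel::mpsc, executor::block_on, stream, StreamExt};

    fn main() {
        let (settings_tx, settings_rx) = mpsc::unbounded::<()>();
        let (external_tx, external_rx) = mpsc::unbounded::<()>();

        // Either source waking up triggers one refresh pass.
        let mut refreshes = stream::select(settings_rx, external_rx);

        settings_tx.unbounded_send(()).unwrap();
        external_tx.unbounded_send(()).unwrap();
        drop(settings_tx);
        drop(external_tx);

        block_on(async move {
            while let Some(()) = refreshes.next().await {
                // In the real code: refresh the server tree, then notify the
                // affected servers with workspace/didChangeConfiguration.
                println!("refresh pass");
            }
        });
    }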
cx.spawn(async move |lsp_store, cx| { @@ -8155,12 +7853,13 @@ impl LspStore { if worktree.read_with(cx, |worktree, _| worktree.is_local())? { lsp_store .update(cx, |lsp_store, cx| { - lsp_store.register_local_language_server( - worktree.clone(), - language_server_name, - language_server_id, - cx, - ) + if let Some(local) = lsp_store.as_local_mut() { + local.register_language_server_for_invisible_worktree( + &worktree, + language_server_id, + cx, + ) + } }) .ok(); } @@ -8293,12 +7992,209 @@ impl LspStore { })? } + async fn handle_lsp_query( + lsp_store: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + use proto::lsp_query::Request; + let sender_id = envelope.original_sender_id().unwrap_or_default(); + let lsp_query = envelope.payload; + let lsp_request_id = LspRequestId(lsp_query.lsp_request_id); + match lsp_query.request.context("invalid LSP query request")? { + Request::GetReferences(get_references) => { + let position = get_references.position.clone().and_then(deserialize_anchor); + Self::query_lsp_locally::( + lsp_store, + sender_id, + lsp_request_id, + get_references, + position, + cx.clone(), + ) + .await?; + } + Request::GetDocumentColor(get_document_color) => { + Self::query_lsp_locally::( + lsp_store, + sender_id, + lsp_request_id, + get_document_color, + None, + cx.clone(), + ) + .await?; + } + Request::GetHover(get_hover) => { + let position = get_hover.position.clone().and_then(deserialize_anchor); + Self::query_lsp_locally::( + lsp_store, + sender_id, + lsp_request_id, + get_hover, + position, + cx.clone(), + ) + .await?; + } + Request::GetCodeActions(get_code_actions) => { + Self::query_lsp_locally::( + lsp_store, + sender_id, + lsp_request_id, + get_code_actions, + None, + cx.clone(), + ) + .await?; + } + Request::GetSignatureHelp(get_signature_help) => { + let position = get_signature_help + .position + .clone() + .and_then(deserialize_anchor); + Self::query_lsp_locally::( + lsp_store, + sender_id, + lsp_request_id, + get_signature_help, + position, + cx.clone(), + ) + .await?; + } + Request::GetCodeLens(get_code_lens) => { + Self::query_lsp_locally::( + lsp_store, + sender_id, + lsp_request_id, + get_code_lens, + None, + cx.clone(), + ) + .await?; + } + Request::GetDefinition(get_definition) => { + let position = get_definition.position.clone().and_then(deserialize_anchor); + Self::query_lsp_locally::( + lsp_store, + sender_id, + lsp_request_id, + get_definition, + position, + cx.clone(), + ) + .await?; + } + Request::GetDeclaration(get_declaration) => { + let position = get_declaration + .position + .clone() + .and_then(deserialize_anchor); + Self::query_lsp_locally::( + lsp_store, + sender_id, + lsp_request_id, + get_declaration, + position, + cx.clone(), + ) + .await?; + } + Request::GetTypeDefinition(get_type_definition) => { + let position = get_type_definition + .position + .clone() + .and_then(deserialize_anchor); + Self::query_lsp_locally::( + lsp_store, + sender_id, + lsp_request_id, + get_type_definition, + position, + cx.clone(), + ) + .await?; + } + Request::GetImplementation(get_implementation) => { + let position = get_implementation + .position + .clone() + .and_then(deserialize_anchor); + Self::query_lsp_locally::( + lsp_store, + sender_id, + lsp_request_id, + get_implementation, + position, + cx.clone(), + ) + .await?; + } + // Diagnostics pull synchronizes internally via the buffer state, and cannot be handled generically as the other requests. 
+ Request::GetDocumentDiagnostics(get_document_diagnostics) => { + let buffer_id = BufferId::new(get_document_diagnostics.buffer_id())?; + let version = deserialize_version(get_document_diagnostics.buffer_version()); + let buffer = lsp_store.update(&mut cx, |this, cx| { + this.buffer_store.read(cx).get_existing(buffer_id) + })??; + buffer + .update(&mut cx, |buffer, _| { + buffer.wait_for_version(version.clone()) + })? + .await?; + lsp_store.update(&mut cx, |lsp_store, cx| { + let existing_queries = lsp_store + .running_lsp_requests + .entry(TypeId::of::()) + .or_default(); + if ::ProtoRequest::stop_previous_requests( + ) || buffer.read(cx).version.changed_since(&existing_queries.0) + { + existing_queries.1.clear(); + } + existing_queries.1.insert( + lsp_request_id, + cx.spawn(async move |lsp_store, cx| { + let diagnostics_pull = lsp_store + .update(cx, |lsp_store, cx| { + lsp_store.pull_diagnostics_for_buffer(buffer, cx) + }) + .ok(); + if let Some(diagnostics_pull) = diagnostics_pull { + match diagnostics_pull.await { + Ok(()) => {} + Err(e) => log::error!("Failed to pull diagnostics: {e:#}"), + }; + } + }), + ); + })?; + } + } + Ok(proto::Ack {}) + } + + async fn handle_lsp_query_response( + lsp_store: Entity, + envelope: TypedEnvelope, + cx: AsyncApp, + ) -> Result<()> { + lsp_store.read_with(&cx, |lsp_store, _| { + if let Some((upstream_client, _)) = lsp_store.upstream_client() { + upstream_client.handle_lsp_response(envelope.clone()); + } + })?; + Ok(()) + } + + // todo(lsp) remove after Zed Stable hits v0.204.x async fn handle_multi_lsp_query( lsp_store: Entity, envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let response_from_ssh = lsp_store.read_with(&mut cx, |this, _| { + let response_from_ssh = lsp_store.read_with(&cx, |this, _| { let (upstream_client, project_id) = this.upstream_client()?; let mut payload = envelope.payload.clone(); payload.project_id = project_id; @@ -8320,7 +8216,7 @@ impl LspStore { buffer.wait_for_version(version.clone()) })? .await?; - let buffer_version = buffer.read_with(&mut cx, |buffer, _| buffer.version())?; + let buffer_version = buffer.read_with(&cx, |buffer, _| buffer.version())?; match envelope .payload .strategy @@ -8861,12 +8757,12 @@ impl LspStore { })? 
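The diagnostics arm of `handle_lsp_query` above keeps in-flight work keyed by the request's `TypeId` and clears superseded entries when the buffer version moves on (or when the request kind stops previous requests). A stripped-down sketch of that bookkeeping; a `u64` stands in for the version clock and a `String` for the spawned task handle:

    use std::any::TypeId;
    use std::collections::HashMap;

    struct InFlight {
        started_for_version: u64,
        /// Per-request handles; in the real code, dropping an entry cancels the work.
        tasks: HashMap<u64, String>,
    }

    #[derive(Default)]
    struct Registry {
        by_request_type: HashMap<TypeId, InFlight>,
    }

    impl Registry {
        fn track<R: 'static>(&mut self, request_id: u64, buffer_version: u64, task: String) {
            let entry = self
                .by_request_type
                .entry(TypeId::of::<R>())
                .or_insert_with(|| InFlight {
                    started_for_version: buffer_version,
                    tasks: HashMap::new(),
                });
            // A newer buffer version invalidates whatever was in flight before.
            if buffer_version > entry.started_for_version {
                entry.tasks.clear();
                entry.started_for_version = buffer_version;
            }
            entry.tasks.insert(request_id, task);
        }
    }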
.context("worktree not found")?; let (old_abs_path, new_abs_path) = { - let root_path = worktree.read_with(&mut cx, |this, _| this.abs_path())?; + let root_path = worktree.read_with(&cx, |this, _| this.abs_path())?; let new_path = PathBuf::from_proto(envelope.payload.new_path.clone()); (root_path.join(&old_path), root_path.join(&new_path)) }; - Self::will_rename_entry( + let _transaction = Self::will_rename_entry( this.downgrade(), worktree_id, &old_abs_path, @@ -8876,7 +8772,7 @@ impl LspStore { ) .await; let response = Worktree::handle_rename_entry(worktree, envelope.payload, cx.clone()).await; - this.read_with(&mut cx, |this, _| { + this.read_with(&cx, |this, _| { this.did_rename_entry(worktree_id, &old_abs_path, &new_abs_path, is_dir); }) .ok(); @@ -8912,12 +8808,11 @@ impl LspStore { if summary.is_empty() { if let Some(worktree_summaries) = lsp_store.diagnostic_summaries.get_mut(&worktree_id) + && let Some(summaries) = worktree_summaries.get_mut(&path) { - if let Some(summaries) = worktree_summaries.get_mut(&path) { - summaries.remove(&server_id); - if summaries.is_empty() { - worktree_summaries.remove(&path); - } + summaries.remove(&server_id); + if summaries.is_empty() { + worktree_summaries.remove(&path); } } } else { @@ -9111,10 +9006,10 @@ impl LspStore { async fn handle_lsp_ext_cancel_flycheck( lsp_store: Entity, envelope: TypedEnvelope, - mut cx: AsyncApp, + cx: AsyncApp, ) -> Result { let server_id = LanguageServerId(envelope.payload.language_server_id as usize); - lsp_store.read_with(&mut cx, |lsp_store, _| { + lsp_store.read_with(&cx, |lsp_store, _| { if let Some(server) = lsp_store.language_server_for_id(server_id) { server .notify::(&()) @@ -9163,10 +9058,10 @@ impl LspStore { async fn handle_lsp_ext_clear_flycheck( lsp_store: Entity, envelope: TypedEnvelope, - mut cx: AsyncApp, + cx: AsyncApp, ) -> Result { let server_id = LanguageServerId(envelope.payload.language_server_id as usize); - lsp_store.read_with(&mut cx, |lsp_store, _| { + lsp_store.read_with(&cx, |lsp_store, _| { if let Some(server) = lsp_store.language_server_for_id(server_id) { server .notify::(&()) @@ -9329,7 +9224,7 @@ impl LspStore { new_path: &Path, is_dir: bool, cx: AsyncApp, - ) -> Task<()> { + ) -> Task { let old_uri = lsp::Url::from_file_path(old_path).ok().map(String::from); let new_uri = lsp::Url::from_file_path(new_path).ok().map(String::from); cx.spawn(async move |cx| { @@ -9345,11 +9240,7 @@ impl LspStore { else { continue; }; - let Some(adapter) = - this.language_server_adapter_for_id(language_server.server_id()) - else { - continue; - }; + if filter.should_send_will_rename(&old_uri, is_dir) { let apply_edit = cx.spawn({ let old_uri = old_uri.clone(); @@ -9366,17 +9257,16 @@ impl LspStore { .log_err() .flatten()?; - LocalLspStore::deserialize_workspace_edit( + let transaction = LocalLspStore::deserialize_workspace_edit( this.upgrade()?, edit, false, - adapter.clone(), language_server.clone(), cx, ) .await - .ok(); - Some(()) + .ok()?; + Some(transaction) } }); tasks.push(apply_edit); @@ -9386,11 +9276,17 @@ impl LspStore { }) .ok() .flatten(); + let mut merged_transaction = ProjectTransaction::default(); for task in tasks { // Await on tasks sequentially so that the order of application of edits is deterministic // (at least with regards to the order of registration of language servers) - task.await; + if let Some(transaction) = task.await { + for (buffer, buffer_transaction) in transaction.0 { + merged_transaction.0.insert(buffer, buffer_transaction); + } + } } + merged_transaction }) } @@ 
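`will_rename_entry` above now returns a `ProjectTransaction` built by awaiting the per-server edit tasks one at a time and folding their buffer transactions into a single map, so the application order stays deterministic and later results overwrite earlier ones for the same buffer. The merging step in miniature, with plain maps instead of Zed's entities:

    use std::collections::HashMap;

    type BufferId = u64;
    type Transaction = &'static str;

    fn merge(
        per_server: Vec<Option<HashMap<BufferId, Transaction>>>,
    ) -> HashMap<BufferId, Transaction> {
        let mut merged = HashMap::new();
        // Iterating in registration order (and awaiting sequentially in the
        // real code) makes the outcome of conflicting edits deterministic.
        for transaction in per_server.into_iter().flatten() {
            for (buffer, buffer_transaction) in transaction {
                merged.insert(buffer, buffer_transaction);
            }
        }
        merged
    }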
-9427,16 +9323,7 @@ impl LspStore { } pub fn language_server_for_id(&self, id: LanguageServerId) -> Option> { - let local_lsp_store = self.as_local()?; - if let Some(LanguageServerState::Running { server, .. }) = - local_lsp_store.language_servers.get(&id) - { - Some(server.clone()) - } else if let Some((_, server)) = local_lsp_store.supplementary_language_servers.get(&id) { - Some(Arc::clone(server)) - } else { - None - } + self.as_local()?.language_server_for_id(id) } fn on_lsp_progress( @@ -9500,9 +9387,7 @@ impl LspStore { let is_disk_based_diagnostics_progress = disk_based_diagnostics_progress_token .as_ref() - .map_or(false, |disk_based_token| { - token.starts_with(disk_based_token) - }); + .is_some_and(|disk_based_token| token.starts_with(disk_based_token)); match progress { lsp::WorkDoneProgress::Begin(report) => { @@ -9632,10 +9517,10 @@ impl LspStore { cx: &mut Context, ) { if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) { - if let Some(work) = status.pending_work.remove(&token) { - if !work.is_disk_based_diagnostics_progress { - cx.emit(LspStoreEvent::RefreshInlayHints); - } + if let Some(work) = status.pending_work.remove(&token) + && !work.is_disk_based_diagnostics_progress + { + cx.emit(LspStoreEvent::RefreshInlayHints); } cx.notify(); } @@ -9948,7 +9833,7 @@ impl LspStore { let peer_id = envelope.original_sender_id().unwrap_or_default(); let symbol = envelope.payload.symbol.context("invalid symbol")?; let symbol = Self::deserialize_symbol(symbol)?; - let symbol = this.read_with(&mut cx, |this, _| { + let symbol = this.read_with(&cx, |this, _| { let signature = this.symbol_signature(&symbol.path); anyhow::ensure!(signature == symbol.signature, "invalid symbol signature"); Ok(symbol) @@ -10198,7 +10083,7 @@ impl LspStore { ) -> Shared>>> { if let Some(environment) = &self.as_local().map(|local| local.environment.clone()) { environment.update(cx, |env, cx| { - env.get_buffer_environment(&buffer, &self.worktree_store, cx) + env.get_buffer_environment(buffer, &self.worktree_store, cx) }) } else { Task::ready(None).shared() @@ -10214,7 +10099,7 @@ impl LspStore { cx: &mut Context, ) -> Task> { let logger = zlog::scoped!("format"); - if let Some(_) = self.as_local() { + if self.as_local().is_some() { zlog::trace!(logger => "Formatting locally"); let logger = zlog::scoped!(logger => "local"); let buffers = buffers @@ -10429,10 +10314,10 @@ impl LspStore { None => None, }; - if let Some(server) = server { - if let Some(shutdown) = server.shutdown() { - shutdown.await; - } + if let Some(server) = server + && let Some(shutdown) = server.shutdown() + { + shutdown.await; } } @@ -10442,28 +10327,18 @@ impl LspStore { &mut self, server_id: LanguageServerId, cx: &mut Context, - ) -> Task> { + ) -> Task<()> { let local = match &mut self.mode { LspStoreMode::Local(local) => local, _ => { - return Task::ready(Vec::new()); + return Task::ready(()); } }; - let mut orphaned_worktrees = Vec::new(); // Remove this server ID from all entries in the given worktree. 
- local.language_server_ids.retain(|(worktree, _), ids| { - if !ids.remove(&server_id) { - return true; - } - - if ids.is_empty() { - orphaned_worktrees.push(*worktree); - false - } else { - true - } - }); + local + .language_server_ids + .retain(|_, state| state.id != server_id); self.buffer_store.update(cx, |buffer_store, cx| { for buffer in buffer_store.buffers() { buffer.update(cx, |buffer, cx| { @@ -10516,7 +10391,7 @@ impl LspStore { let name = self .language_server_statuses .remove(&server_id) - .map(|status| status.name.clone()) + .map(|status| status.name) .or_else(|| { if let Some(LanguageServerState::Running { adapter, .. }) = server_state.as_ref() { Some(adapter.name()) @@ -10542,14 +10417,13 @@ impl LspStore { cx.notify(); }) .ok(); - orphaned_worktrees }); } if server_state.is_some() { cx.emit(LspStoreEvent::LanguageServerRemoved(server_id)); } - Task::ready(orphaned_worktrees) + Task::ready(()) } pub fn stop_all_language_servers(&mut self, cx: &mut Context) { @@ -10568,12 +10442,9 @@ impl LspStore { let language_servers_to_stop = local .language_server_ids .values() - .flatten() - .copied() + .map(|state| state.id) .collect(); - local.lsp_tree.update(cx, |this, _| { - this.remove_nodes(&language_servers_to_stop); - }); + local.lsp_tree.remove_nodes(&language_servers_to_stop); let tasks = language_servers_to_stop .into_iter() .map(|server| self.stop_local_language_server(server, cx)) @@ -10720,37 +10591,31 @@ impl LspStore { for buffer in buffers { buffer.update(cx, |buffer, cx| { language_servers_to_stop.extend(local.language_server_ids_for_buffer(buffer, cx)); - if let Some(worktree_id) = buffer.file().map(|f| f.worktree_id(cx)) { - if covered_worktrees.insert(worktree_id) { - language_server_names_to_stop.retain(|name| { - match local.language_server_ids.get(&(worktree_id, name.clone())) { - Some(server_ids) => { - language_servers_to_stop - .extend(server_ids.into_iter().copied()); - false - } - None => true, - } - }); - } + if let Some(worktree_id) = buffer.file().map(|f| f.worktree_id(cx)) + && covered_worktrees.insert(worktree_id) + { + language_server_names_to_stop.retain(|name| { + let old_ids_count = language_servers_to_stop.len(); + let all_language_servers_with_this_name = local + .language_server_ids + .iter() + .filter_map(|(seed, state)| seed.name.eq(name).then(|| state.id)); + language_servers_to_stop.extend(all_language_servers_with_this_name); + old_ids_count == language_servers_to_stop.len() + }); } }); } for name in language_server_names_to_stop { - if let Some(server_ids) = local - .language_server_ids - .iter() - .filter(|((_, server_name), _)| server_name == &name) - .map(|((_, _), server_ids)| server_ids) - .max_by_key(|server_ids| server_ids.len()) - { - language_servers_to_stop.extend(server_ids.into_iter().copied()); - } + language_servers_to_stop.extend( + local + .language_server_ids + .iter() + .filter_map(|(seed, v)| seed.name.eq(&name).then(|| v.id)), + ); } - local.lsp_tree.update(cx, |this, _| { - this.remove_nodes(&language_servers_to_stop); - }); + local.lsp_tree.remove_nodes(&language_servers_to_stop); let tasks = language_servers_to_stop .into_iter() .map(|server| self.stop_local_language_server(server, cx)) @@ -10855,7 +10720,7 @@ impl LspStore { let is_supporting = diagnostic .related_information .as_ref() - .map_or(false, |infos| { + .is_some_and(|infos| { infos.iter().any(|info| { primary_diagnostic_group_ids.contains_key(&( source, @@ -10868,11 +10733,11 @@ impl LspStore { let is_unnecessary = diagnostic .tags .as_ref() - 
.map_or(false, |tags| tags.contains(&DiagnosticTag::UNNECESSARY)); + .is_some_and(|tags| tags.contains(&DiagnosticTag::UNNECESSARY)); let underline = self .language_server_adapter_for_id(server_id) - .map_or(true, |adapter| adapter.underline_diagnostic(diagnostic)); + .is_none_or(|adapter| adapter.underline_diagnostic(diagnostic)); if is_supporting { supporting_diagnostics.insert( @@ -10882,7 +10747,7 @@ impl LspStore { } else { let group_id = post_inc(&mut self.as_local_mut().unwrap().next_diagnostic_group_id); let is_disk_based = - source.map_or(false, |source| disk_based_sources.contains(source)); + source.is_some_and(|source| disk_based_sources.contains(source)); sources_by_group_id.insert(group_id, source); primary_diagnostic_group_ids @@ -10973,7 +10838,7 @@ impl LspStore { adapter: Arc, language_server: Arc, server_id: LanguageServerId, - key: (WorktreeId, LanguageServerName), + key: LanguageServerSeed, workspace_folders: Arc>>, cx: &mut Context, ) { @@ -10985,7 +10850,7 @@ impl LspStore { if local .language_server_ids .get(&key) - .map(|ids| !ids.contains(&server_id)) + .map(|state| state.id != server_id) .unwrap_or(false) { return; @@ -11042,7 +10907,7 @@ impl LspStore { cx.emit(LspStoreEvent::LanguageServerAdded( server_id, language_server.name(), - Some(key.0), + Some(key.worktree_id), )); cx.emit(LspStoreEvent::RefreshInlayHints); @@ -11054,7 +10919,7 @@ impl LspStore { server: Some(proto::LanguageServer { id: server_id.to_proto(), name: language_server.name().to_string(), - worktree_id: Some(key.0.to_proto()), + worktree_id: Some(key.worktree_id.to_proto()), }), capabilities: serde_json::to_string(&server_capabilities) .expect("serializing server LSP capabilities"), @@ -11066,13 +10931,13 @@ impl LspStore { // Tell the language server about every open buffer in the worktree that matches the language. // Also check for buffers in worktrees that reused this server - let mut worktrees_using_server = vec![key.0]; + let mut worktrees_using_server = vec![key.worktree_id]; if let Some(local) = self.as_local() { // Find all worktrees that have this server in their language server tree - for (worktree_id, servers) in &local.lsp_tree.read(cx).instances { - if *worktree_id != key.0 { - for (_, server_map) in &servers.roots { - if server_map.contains_key(&key.1) { + for (worktree_id, servers) in &local.lsp_tree.instances { + if *worktree_id != key.worktree_id { + for server_map in servers.roots.values() { + if server_map.contains_key(&key.name) { worktrees_using_server.push(*worktree_id); } } @@ -11098,7 +10963,7 @@ impl LspStore { .languages .lsp_adapters(&language.name()) .iter() - .any(|a| a.name == key.1) + .any(|a| a.name == key.name) { continue; } @@ -11242,10 +11107,10 @@ impl LspStore { if let Some((LanguageServerState::Running { server, .. 
}, status)) = server.zip(status) { for (token, progress) in &status.pending_work { - if let Some(token_to_cancel) = token_to_cancel.as_ref() { - if token != token_to_cancel { - continue; - } + if let Some(token_to_cancel) = token_to_cancel.as_ref() + && token != token_to_cancel + { + continue; } if progress.is_cancellable { server @@ -11336,18 +11201,14 @@ impl LspStore { let Some(local) = self.as_local() else { return }; local.prettier_store.update(cx, |prettier_store, cx| { - prettier_store.update_prettier_settings(&worktree_handle, changes, cx) + prettier_store.update_prettier_settings(worktree_handle, changes, cx) }); let worktree_id = worktree_handle.read(cx).id(); let mut language_server_ids = local .language_server_ids .iter() - .flat_map(|((server_worktree, _), server_ids)| { - server_ids - .iter() - .filter_map(|server_id| server_worktree.eq(&worktree_id).then(|| *server_id)) - }) + .filter_map(|(seed, v)| seed.worktree_id.eq(&worktree_id).then(|| v.id)) .collect::>(); language_server_ids.sort(); language_server_ids.dedup(); @@ -11356,41 +11217,47 @@ impl LspStore { for server_id in &language_server_ids { if let Some(LanguageServerState::Running { server, .. }) = local.language_servers.get(server_id) - { - if let Some(watched_paths) = local + && let Some(watched_paths) = local .language_server_watched_paths .get(server_id) .and_then(|paths| paths.worktree_paths.get(&worktree_id)) - { - let params = lsp::DidChangeWatchedFilesParams { - changes: changes - .iter() - .filter_map(|(path, _, change)| { - if !watched_paths.is_match(path) { - return None; - } - let typ = match change { - PathChange::Loaded => return None, - PathChange::Added => lsp::FileChangeType::CREATED, - PathChange::Removed => lsp::FileChangeType::DELETED, - PathChange::Updated => lsp::FileChangeType::CHANGED, - PathChange::AddedOrUpdated => lsp::FileChangeType::CHANGED, - }; - Some(lsp::FileEvent { - uri: lsp::Url::from_file_path(abs_path.join(path)).unwrap(), - typ, - }) + { + let params = lsp::DidChangeWatchedFilesParams { + changes: changes + .iter() + .filter_map(|(path, _, change)| { + if !watched_paths.is_match(path) { + return None; + } + let typ = match change { + PathChange::Loaded => return None, + PathChange::Added => lsp::FileChangeType::CREATED, + PathChange::Removed => lsp::FileChangeType::DELETED, + PathChange::Updated => lsp::FileChangeType::CHANGED, + PathChange::AddedOrUpdated => lsp::FileChangeType::CHANGED, + }; + Some(lsp::FileEvent { + uri: lsp::Url::from_file_path(abs_path.join(path)).unwrap(), + typ, }) - .collect(), - }; - if !params.changes.is_empty() { - server - .notify::(¶ms) - .ok(); - } + }) + .collect(), + }; + if !params.changes.is_empty() { + server + .notify::(¶ms) + .ok(); } } } + for (path, _, _) in changes { + if let Some(file_name) = path.file_name().and_then(|file_name| file_name.to_str()) + && local.watched_manifest_filenames.contains(file_name) + { + self.request_workspace_config_refresh(); + break; + } + } } pub fn wait_for_remote_buffer( @@ -11801,6 +11668,520 @@ impl LspStore { .log_err(); } } + + fn register_server_capabilities( + &mut self, + server_id: LanguageServerId, + params: lsp::RegistrationParams, + cx: &mut Context, + ) -> anyhow::Result<()> { + let server = self + .language_server_for_id(server_id) + .with_context(|| format!("no server {server_id} found"))?; + for reg in params.registrations { + match reg.method.as_str() { + "workspace/didChangeWatchedFiles" => { + if let Some(options) = reg.register_options { + let notify = if let Some(local_lsp_store) = 
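
For servers that registered workspace/didChangeWatchedFiles, the hunk above turns worktree scan changes into LSP file events: paths that don't match the server's watch globs are skipped, PathChange::Loaded entries (initial scans, not real changes) are dropped, and the rest are mapped to created/deleted/changed events rooted at the worktree's absolute path. A self-contained sketch with simplified stand-ins for lsp::FileEvent and the compiled glob matcher:

use std::path::{Path, PathBuf};

#[derive(Clone, Copy, Debug)]
enum PathChange {
    Loaded,
    Added,
    Removed,
    Updated,
    AddedOrUpdated,
}

#[derive(Debug)]
enum FileChangeType {
    Created,
    Deleted,
    Changed,
}

#[derive(Debug)]
struct FileEvent {
    path: PathBuf,
    typ: FileChangeType,
}

/// Turn scanner changes into the events a server asked to watch.
/// `is_watched` stands in for the per-server glob matcher.
fn watched_file_events(
    worktree_abs_path: &Path,
    changes: &[(PathBuf, PathChange)],
    is_watched: impl Fn(&Path) -> bool,
) -> Vec<FileEvent> {
    changes
        .iter()
        .filter_map(|(path, change)| {
            if !is_watched(path) {
                return None;
            }
            let typ = match change {
                // Initial scan results are not file-system changes; skip them.
                PathChange::Loaded => return None,
                PathChange::Added => FileChangeType::Created,
                PathChange::Removed => FileChangeType::Deleted,
                PathChange::Updated | PathChange::AddedOrUpdated => FileChangeType::Changed,
            };
            Some(FileEvent {
                path: worktree_abs_path.join(path),
                typ,
            })
        })
        .collect()
}

fn main() {
    let changes = vec![
        (PathBuf::from("Cargo.toml"), PathChange::Updated),
        (PathBuf::from("target/debug/app"), PathChange::Added),
        (PathBuf::from("src/main.rs"), PathChange::Loaded),
        (PathBuf::from("build.rs"), PathChange::Removed),
        (PathBuf::from("README.md"), PathChange::AddedOrUpdated),
    ];
    // Only notify about *.toml and *.rs, mirroring a watcher registration.
    let events = watched_file_events(Path::new("/project"), &changes, |p| {
        p.extension().is_some_and(|ext| ext == "toml" || ext == "rs")
    });
    assert_eq!(events.len(), 2);
    println!("{events:?}");
}
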
self.as_local_mut() { + let caps = serde_json::from_value(options)?; + local_lsp_store + .on_lsp_did_change_watched_files(server_id, ®.id, caps, cx); + true + } else { + false + }; + if notify { + notify_server_capabilities_updated(&server, cx); + } + } + } + "workspace/didChangeConfiguration" => { + // Ignore payload since we notify clients of setting changes unconditionally, relying on them pulling the latest settings. + } + "workspace/symbol" => { + if let Some(options) = parse_register_capabilities(reg)? { + server.update_capabilities(|capabilities| { + capabilities.workspace_symbol_provider = Some(options); + }); + notify_server_capabilities_updated(&server, cx); + } + } + "workspace/fileOperations" => { + if let Some(options) = reg.register_options { + let caps = serde_json::from_value(options)?; + server.update_capabilities(|capabilities| { + capabilities + .workspace + .get_or_insert_default() + .file_operations = Some(caps); + }); + notify_server_capabilities_updated(&server, cx); + } + } + "workspace/executeCommand" => { + if let Some(options) = reg.register_options { + let options = serde_json::from_value(options)?; + server.update_capabilities(|capabilities| { + capabilities.execute_command_provider = Some(options); + }); + notify_server_capabilities_updated(&server, cx); + } + } + "textDocument/rangeFormatting" => { + if let Some(options) = parse_register_capabilities(reg)? { + server.update_capabilities(|capabilities| { + capabilities.document_range_formatting_provider = Some(options); + }); + notify_server_capabilities_updated(&server, cx); + } + } + "textDocument/onTypeFormatting" => { + if let Some(options) = reg + .register_options + .map(serde_json::from_value) + .transpose()? + { + server.update_capabilities(|capabilities| { + capabilities.document_on_type_formatting_provider = Some(options); + }); + notify_server_capabilities_updated(&server, cx); + } + } + "textDocument/formatting" => { + if let Some(options) = parse_register_capabilities(reg)? { + server.update_capabilities(|capabilities| { + capabilities.document_formatting_provider = Some(options); + }); + notify_server_capabilities_updated(&server, cx); + } + } + "textDocument/rename" => { + if let Some(options) = parse_register_capabilities(reg)? { + server.update_capabilities(|capabilities| { + capabilities.rename_provider = Some(options); + }); + notify_server_capabilities_updated(&server, cx); + } + } + "textDocument/inlayHint" => { + if let Some(options) = parse_register_capabilities(reg)? { + server.update_capabilities(|capabilities| { + capabilities.inlay_hint_provider = Some(options); + }); + notify_server_capabilities_updated(&server, cx); + } + } + "textDocument/documentSymbol" => { + if let Some(options) = parse_register_capabilities(reg)? { + server.update_capabilities(|capabilities| { + capabilities.document_symbol_provider = Some(options); + }); + notify_server_capabilities_updated(&server, cx); + } + } + "textDocument/codeAction" => { + if let Some(options) = reg + .register_options + .map(serde_json::from_value) + .transpose()? + { + server.update_capabilities(|capabilities| { + capabilities.code_action_provider = + Some(lsp::CodeActionProviderCapability::Options(options)); + }); + notify_server_capabilities_updated(&server, cx); + } + } + "textDocument/definition" => { + if let Some(options) = parse_register_capabilities(reg)? 
{ + server.update_capabilities(|capabilities| { + capabilities.definition_provider = Some(options); + }); + notify_server_capabilities_updated(&server, cx); + } + } + "textDocument/completion" => { + if let Some(caps) = reg + .register_options + .map(serde_json::from_value) + .transpose()? + { + server.update_capabilities(|capabilities| { + capabilities.completion_provider = Some(caps); + }); + notify_server_capabilities_updated(&server, cx); + } + } + "textDocument/hover" => { + if let Some(caps) = reg + .register_options + .map(serde_json::from_value) + .transpose()? + { + server.update_capabilities(|capabilities| { + capabilities.hover_provider = Some(caps); + }); + notify_server_capabilities_updated(&server, cx); + } + } + "textDocument/signatureHelp" => { + if let Some(caps) = reg + .register_options + .map(serde_json::from_value) + .transpose()? + { + server.update_capabilities(|capabilities| { + capabilities.signature_help_provider = Some(caps); + }); + notify_server_capabilities_updated(&server, cx); + } + } + "textDocument/didChange" => { + if let Some(sync_kind) = reg + .register_options + .and_then(|opts| opts.get("syncKind").cloned()) + .map(serde_json::from_value::) + .transpose()? + { + server.update_capabilities(|capabilities| { + let mut sync_options = + Self::take_text_document_sync_options(capabilities); + sync_options.change = Some(sync_kind); + capabilities.text_document_sync = + Some(lsp::TextDocumentSyncCapability::Options(sync_options)); + }); + notify_server_capabilities_updated(&server, cx); + } + } + "textDocument/didSave" => { + if let Some(include_text) = reg + .register_options + .map(|opts| { + let transpose = opts + .get("includeText") + .cloned() + .map(serde_json::from_value::>) + .transpose(); + match transpose { + Ok(value) => Ok(value.flatten()), + Err(e) => Err(e), + } + }) + .transpose()? + { + server.update_capabilities(|capabilities| { + let mut sync_options = + Self::take_text_document_sync_options(capabilities); + sync_options.save = + Some(TextDocumentSyncSaveOptions::SaveOptions(lsp::SaveOptions { + include_text, + })); + capabilities.text_document_sync = + Some(lsp::TextDocumentSyncCapability::Options(sync_options)); + }); + notify_server_capabilities_updated(&server, cx); + } + } + "textDocument/codeLens" => { + if let Some(caps) = reg + .register_options + .map(serde_json::from_value) + .transpose()? + { + server.update_capabilities(|capabilities| { + capabilities.code_lens_provider = Some(caps); + }); + notify_server_capabilities_updated(&server, cx); + } + } + "textDocument/diagnostic" => { + if let Some(caps) = reg + .register_options + .map(serde_json::from_value) + .transpose()? + { + server.update_capabilities(|capabilities| { + capabilities.diagnostic_provider = Some(caps); + }); + notify_server_capabilities_updated(&server, cx); + } + } + "textDocument/colorProvider" => { + if let Some(caps) = reg + .register_options + .map(serde_json::from_value) + .transpose()? 
+ { + server.update_capabilities(|capabilities| { + capabilities.color_provider = Some(caps); + }); + notify_server_capabilities_updated(&server, cx); + } + } + _ => log::warn!("unhandled capability registration: {reg:?}"), + } + } + + Ok(()) + } + + fn unregister_server_capabilities( + &mut self, + server_id: LanguageServerId, + params: lsp::UnregistrationParams, + cx: &mut Context, + ) -> anyhow::Result<()> { + let server = self + .language_server_for_id(server_id) + .with_context(|| format!("no server {server_id} found"))?; + for unreg in params.unregisterations.iter() { + match unreg.method.as_str() { + "workspace/didChangeWatchedFiles" => { + let notify = if let Some(local_lsp_store) = self.as_local_mut() { + local_lsp_store + .on_lsp_unregister_did_change_watched_files(server_id, &unreg.id, cx); + true + } else { + false + }; + if notify { + notify_server_capabilities_updated(&server, cx); + } + } + "workspace/didChangeConfiguration" => { + // Ignore payload since we notify clients of setting changes unconditionally, relying on them pulling the latest settings. + } + "workspace/symbol" => { + server.update_capabilities(|capabilities| { + capabilities.workspace_symbol_provider = None + }); + notify_server_capabilities_updated(&server, cx); + } + "workspace/fileOperations" => { + server.update_capabilities(|capabilities| { + capabilities + .workspace + .get_or_insert_with(|| lsp::WorkspaceServerCapabilities { + workspace_folders: None, + file_operations: None, + }) + .file_operations = None; + }); + notify_server_capabilities_updated(&server, cx); + } + "workspace/executeCommand" => { + server.update_capabilities(|capabilities| { + capabilities.execute_command_provider = None; + }); + notify_server_capabilities_updated(&server, cx); + } + "textDocument/rangeFormatting" => { + server.update_capabilities(|capabilities| { + capabilities.document_range_formatting_provider = None + }); + notify_server_capabilities_updated(&server, cx); + } + "textDocument/onTypeFormatting" => { + server.update_capabilities(|capabilities| { + capabilities.document_on_type_formatting_provider = None; + }); + notify_server_capabilities_updated(&server, cx); + } + "textDocument/formatting" => { + server.update_capabilities(|capabilities| { + capabilities.document_formatting_provider = None; + }); + notify_server_capabilities_updated(&server, cx); + } + "textDocument/rename" => { + server.update_capabilities(|capabilities| capabilities.rename_provider = None); + notify_server_capabilities_updated(&server, cx); + } + "textDocument/codeAction" => { + server.update_capabilities(|capabilities| { + capabilities.code_action_provider = None; + }); + notify_server_capabilities_updated(&server, cx); + } + "textDocument/definition" => { + server.update_capabilities(|capabilities| { + capabilities.definition_provider = None; + }); + notify_server_capabilities_updated(&server, cx); + } + "textDocument/completion" => { + server.update_capabilities(|capabilities| { + capabilities.completion_provider = None; + }); + notify_server_capabilities_updated(&server, cx); + } + "textDocument/hover" => { + server.update_capabilities(|capabilities| { + capabilities.hover_provider = None; + }); + notify_server_capabilities_updated(&server, cx); + } + "textDocument/signatureHelp" => { + server.update_capabilities(|capabilities| { + capabilities.signature_help_provider = None; + }); + notify_server_capabilities_updated(&server, cx); + } + "textDocument/didChange" => { + server.update_capabilities(|capabilities| { + let mut sync_options = 
Self::take_text_document_sync_options(capabilities); + sync_options.change = None; + capabilities.text_document_sync = + Some(lsp::TextDocumentSyncCapability::Options(sync_options)); + }); + notify_server_capabilities_updated(&server, cx); + } + "textDocument/didSave" => { + server.update_capabilities(|capabilities| { + let mut sync_options = Self::take_text_document_sync_options(capabilities); + sync_options.save = None; + capabilities.text_document_sync = + Some(lsp::TextDocumentSyncCapability::Options(sync_options)); + }); + notify_server_capabilities_updated(&server, cx); + } + "textDocument/codeLens" => { + server.update_capabilities(|capabilities| { + capabilities.code_lens_provider = None; + }); + notify_server_capabilities_updated(&server, cx); + } + "textDocument/diagnostic" => { + server.update_capabilities(|capabilities| { + capabilities.diagnostic_provider = None; + }); + notify_server_capabilities_updated(&server, cx); + } + "textDocument/colorProvider" => { + server.update_capabilities(|capabilities| { + capabilities.color_provider = None; + }); + notify_server_capabilities_updated(&server, cx); + } + _ => log::warn!("unhandled capability unregistration: {unreg:?}"), + } + } + + Ok(()) + } + + async fn query_lsp_locally( + lsp_store: Entity, + sender_id: proto::PeerId, + lsp_request_id: LspRequestId, + proto_request: T::ProtoRequest, + position: Option, + mut cx: AsyncApp, + ) -> Result<()> + where + T: LspCommand + Clone, + T::ProtoRequest: proto::LspRequestMessage, + ::Response: + Into<::Response>, + { + let buffer_id = BufferId::new(proto_request.buffer_id())?; + let version = deserialize_version(proto_request.buffer_version()); + let buffer = lsp_store.update(&mut cx, |this, cx| { + this.buffer_store.read(cx).get_existing(buffer_id) + })??; + buffer + .update(&mut cx, |buffer, _| { + buffer.wait_for_version(version.clone()) + })? 
+ .await?; + let buffer_version = buffer.read_with(&cx, |buffer, _| buffer.version())?; + let request = + T::from_proto(proto_request, lsp_store.clone(), buffer.clone(), cx.clone()).await?; + lsp_store.update(&mut cx, |lsp_store, cx| { + let request_task = + lsp_store.request_multiple_lsp_locally(&buffer, position, request, cx); + let existing_queries = lsp_store + .running_lsp_requests + .entry(TypeId::of::()) + .or_default(); + if T::ProtoRequest::stop_previous_requests() + || buffer_version.changed_since(&existing_queries.0) + { + existing_queries.1.clear(); + } + existing_queries.1.insert( + lsp_request_id, + cx.spawn(async move |lsp_store, cx| { + let response = request_task.await; + lsp_store + .update(cx, |lsp_store, cx| { + if let Some((client, project_id)) = lsp_store.downstream_client.clone() + { + let response = response + .into_iter() + .map(|(server_id, response)| { + ( + server_id.to_proto(), + T::response_to_proto( + response, + lsp_store, + sender_id, + &buffer_version, + cx, + ) + .into(), + ) + }) + .collect::>(); + match client.send_lsp_response::( + project_id, + lsp_request_id, + response, + ) { + Ok(()) => {} + Err(e) => { + log::error!("Failed to send LSP response: {e:#}",) + } + } + } + }) + .ok(); + }), + ); + })?; + Ok(()) + } + + fn take_text_document_sync_options( + capabilities: &mut lsp::ServerCapabilities, + ) -> lsp::TextDocumentSyncOptions { + match capabilities.text_document_sync.take() { + Some(lsp::TextDocumentSyncCapability::Options(sync_options)) => sync_options, + Some(lsp::TextDocumentSyncCapability::Kind(sync_kind)) => { + let mut sync_options = lsp::TextDocumentSyncOptions::default(); + sync_options.change = Some(sync_kind); + sync_options + } + None => lsp::TextDocumentSyncOptions::default(), + } + } + + #[cfg(any(test, feature = "test-support"))] + pub fn forget_code_lens_task(&mut self, buffer_id: BufferId) -> Option { + let data = self.lsp_code_lens.get_mut(&buffer_id)?; + Some(data.update.take()?.1) + } +} + +// Registration with registerOptions as null, should fallback to true. 
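
take_text_document_sync_options, added above, normalizes whatever shape textDocumentSync currently has (a bare kind, full options, or nothing) into an options struct, so the didChange/didSave registration arms can update a single field and write the result back without losing the other settings. A minimal sketch with simplified stand-ins for lsp::TextDocumentSyncCapability and lsp::TextDocumentSyncOptions:

#[allow(dead_code)]
#[derive(Debug, Clone, Copy, PartialEq)]
enum SyncKind {
    Full,
    Incremental,
}

#[derive(Debug, Default, Clone, PartialEq)]
struct SyncOptions {
    change: Option<SyncKind>,
    save_include_text: Option<bool>,
}

#[derive(Debug, Clone, PartialEq)]
enum SyncCapability {
    Kind(SyncKind),
    Options(SyncOptions),
}

fn take_sync_options(capability: &mut Option<SyncCapability>) -> SyncOptions {
    match capability.take() {
        Some(SyncCapability::Options(options)) => options,
        // A bare kind only carries the `change` setting; preserve it.
        Some(SyncCapability::Kind(kind)) => SyncOptions {
            change: Some(kind),
            ..SyncOptions::default()
        },
        None => SyncOptions::default(),
    }
}

fn main() {
    // The server initially advertised only a sync kind; a later dynamic
    // registration for textDocument/didSave adds includeText without losing it.
    let mut capability = Some(SyncCapability::Kind(SyncKind::Incremental));
    let mut options = take_sync_options(&mut capability);
    options.save_include_text = Some(true);
    capability = Some(SyncCapability::Options(options));

    assert_eq!(
        capability,
        Some(SyncCapability::Options(SyncOptions {
            change: Some(SyncKind::Incremental),
            save_include_text: Some(true),
        }))
    );
    println!("{capability:?}");
}
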
+// https://github.com/microsoft/vscode-languageserver-node/blob/d90a87f9557a0df9142cfb33e251cfa6fe27d970/client/src/common/client.ts#L2133 +fn parse_register_capabilities( + reg: lsp::Registration, +) -> anyhow::Result>> { + Ok(match reg.register_options { + Some(options) => Some(OneOf::Right(serde_json::from_value::(options)?)), + None => Some(OneOf::Left(true)), + }) } fn subscribe_to_binary_statuses( @@ -12035,11 +12416,10 @@ async fn populate_labels_for_completions( let lsp_completions = new_completions .iter() .filter_map(|new_completion| { - if let Some(lsp_completion) = new_completion.source.lsp_completion(true) { - Some(lsp_completion.into_owned()) - } else { - None - } + new_completion + .source + .lsp_completion(true) + .map(|lsp_completion| lsp_completion.into_owned()) }) .collect::>(); @@ -12059,11 +12439,7 @@ async fn populate_labels_for_completions( for completion in new_completions { match completion.source.lsp_completion(true) { Some(lsp_completion) => { - let documentation = if let Some(docs) = lsp_completion.documentation.clone() { - Some(docs.into()) - } else { - None - }; + let documentation = lsp_completion.documentation.clone().map(|docs| docs.into()); let mut label = labels.next().flatten().unwrap_or_else(|| { CodeLabel::fallback_for_completion(&lsp_completion, language.as_deref()) @@ -12159,7 +12535,7 @@ impl TryFrom<&FileOperationFilter> for RenameActionPredicate { ops.pattern .options .as_ref() - .map_or(false, |ops| ops.ignore_case.unwrap_or(false)), + .is_some_and(|ops| ops.ignore_case.unwrap_or(false)), ) .build()? .compile_matcher(), @@ -12174,7 +12550,7 @@ struct RenameActionPredicate { impl RenameActionPredicate { // Returns true if language server should be notified fn eval(&self, path: &str, is_dir: bool) -> bool { - self.kind.as_ref().map_or(true, |kind| { + self.kind.as_ref().is_none_or(|kind| { let expected_kind = if is_dir { FileOperationPatternKind::Folder } else { @@ -12451,7 +12827,7 @@ impl DiagnosticSummary { } pub fn to_proto( - &self, + self, language_server_id: LanguageServerId, path: &Path, ) -> proto::DiagnosticSummary { @@ -12568,7 +12944,7 @@ impl LspAdapter for SshLspAdapter { async fn check_if_user_installed( &self, _: &dyn LspAdapterDelegate, - _: Arc, + _: Option, _: &AsyncApp, ) -> Option { Some(self.binary.clone()) @@ -12891,24 +13267,18 @@ async fn populate_labels_for_symbols( fn include_text(server: &lsp::LanguageServer) -> Option { match server.capabilities().text_document_sync.as_ref()? { - lsp::TextDocumentSyncCapability::Kind(kind) => match *kind { - lsp::TextDocumentSyncKind::NONE => None, - lsp::TextDocumentSyncKind::FULL => Some(true), - lsp::TextDocumentSyncKind::INCREMENTAL => Some(false), - _ => None, - }, - lsp::TextDocumentSyncCapability::Options(options) => match options.save.as_ref()? { - lsp::TextDocumentSyncSaveOptions::Supported(supported) => { - if *supported { - Some(true) - } else { - None - } - } + lsp::TextDocumentSyncCapability::Options(opts) => match opts.save.as_ref()? { + // Server wants didSave but didn't specify includeText. + lsp::TextDocumentSyncSaveOptions::Supported(true) => Some(false), + // Server doesn't want didSave at all. + lsp::TextDocumentSyncSaveOptions::Supported(false) => None, + // Server provided SaveOptions. lsp::TextDocumentSyncSaveOptions::SaveOptions(save_options) => { Some(save_options.include_text.unwrap_or(false)) } }, + // We do not have any save info. Kind affects didChange only. 
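
parse_register_capabilities, defined in this hunk, encodes the fallback described by the comment that precedes it: a dynamic registration with no registerOptions still turns the capability on, so it maps to OneOf::Left(true), while a present payload is deserialized into the capability's options type. A sketch under the assumption that serde and serde_json (with the derive feature) are available as dependencies; Either and RenameOptions are stand-ins for lsp::OneOf and a concrete options type.

use serde::de::DeserializeOwned;

#[derive(Debug, PartialEq)]
enum Either<L, R> {
    Left(L),
    Right(R),
}

fn parse_registration<T: DeserializeOwned>(
    register_options: Option<serde_json::Value>,
) -> Result<Either<bool, T>, serde_json::Error> {
    Ok(match register_options {
        Some(options) => Either::Right(serde_json::from_value::<T>(options)?),
        // No options at all: fall back to "capability enabled".
        None => Either::Left(true),
    })
}

#[derive(Debug, PartialEq, serde::Deserialize)]
struct RenameOptions {
    #[serde(default)]
    prepare_provider: bool,
}

fn main() -> Result<(), serde_json::Error> {
    // Registration without options: the capability is simply enabled.
    assert_eq!(
        parse_registration::<RenameOptions>(None)?,
        Either::Left(true)
    );
    // Registration with options: the payload is deserialized as-is.
    assert_eq!(
        parse_registration::<RenameOptions>(Some(serde_json::json!({
            "prepare_provider": true
        })))?,
        Either::Right(RenameOptions { prepare_provider: true })
    );
    Ok(())
}
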
+ lsp::TextDocumentSyncCapability::Kind(_) => None, } } @@ -12925,10 +13295,10 @@ fn ensure_uniform_list_compatible_label(label: &mut CodeLabel) { let mut offset_map = vec![0; label.text.len() + 1]; let mut last_char_was_space = false; let mut new_idx = 0; - let mut chars = label.text.char_indices().fuse(); + let chars = label.text.char_indices().fuse(); let mut newlines_removed = false; - while let Some((idx, c)) = chars.next() { + for (idx, c) in chars { offset_map[idx] = new_idx; match c { diff --git a/crates/project/src/lsp_store/clangd_ext.rs b/crates/project/src/lsp_store/clangd_ext.rs index 274b1b898086eeddf72710052397dd9963833663..b02f68dd4d1271ca9a8fa97e9ef41e03fdfe9763 100644 --- a/crates/project/src/lsp_store/clangd_ext.rs +++ b/crates/project/src/lsp_store/clangd_ext.rs @@ -58,7 +58,7 @@ pub fn register_notifications( language_server .on_notification::({ - let adapter = adapter.clone(); + let adapter = adapter; let this = lsp_store; move |params: InactiveRegionsParams, cx| { diff --git a/crates/project/src/lsp_store/lsp_ext_command.rs b/crates/project/src/lsp_store/lsp_ext_command.rs index cb13fa5efcfd753e0ffb12fbcc0f3d84e09ff370..1c969f8114eb7647e7c109baf2a7b70339997b41 100644 --- a/crates/project/src/lsp_store/lsp_ext_command.rs +++ b/crates/project/src/lsp_store/lsp_ext_command.rs @@ -115,14 +115,14 @@ impl LspCommand for ExpandMacro { message: Self::ProtoRequest, _: Entity, buffer: Entity, - mut cx: AsyncApp, + cx: AsyncApp, ) -> anyhow::Result { let position = message .position .and_then(deserialize_anchor) .context("invalid position")?; Ok(Self { - position: buffer.read_with(&mut cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, }) } @@ -249,14 +249,14 @@ impl LspCommand for OpenDocs { message: Self::ProtoRequest, _: Entity, buffer: Entity, - mut cx: AsyncApp, + cx: AsyncApp, ) -> anyhow::Result { let position = message .position .and_then(deserialize_anchor) .context("invalid position")?; Ok(Self { - position: buffer.read_with(&mut cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, }) } @@ -462,14 +462,14 @@ impl LspCommand for GoToParentModule { request: Self::ProtoRequest, _: Entity, buffer: Entity, - mut cx: AsyncApp, + cx: AsyncApp, ) -> anyhow::Result { let position = request .position .and_then(deserialize_anchor) .context("bad request with bad position")?; Ok(Self { - position: buffer.read_with(&mut cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, }) } diff --git a/crates/project/src/lsp_store/rust_analyzer_ext.rs b/crates/project/src/lsp_store/rust_analyzer_ext.rs index 6c425717a82e94985c60db8d1034d470f1aeec35..e5e6338d3c901752e020c634d822903c8e591657 100644 --- a/crates/project/src/lsp_store/rust_analyzer_ext.rs +++ b/crates/project/src/lsp_store/rust_analyzer_ext.rs @@ -34,7 +34,6 @@ pub fn register_notifications(lsp_store: WeakEntity, language_server: language_server .on_notification::({ - let name = name.clone(); move |params, cx| { let message = params.message; let log_message = message.as_ref().map(|message| { diff --git a/crates/project/src/manifest_tree.rs b/crates/project/src/manifest_tree.rs index 7266acb5b4a29b68d8863feb760334de46260424..5a3c7bd40fb11ee5bebe340ddc57ec71a112270b 100644 --- a/crates/project/src/manifest_tree.rs +++ b/crates/project/src/manifest_tree.rs @@ -7,18 +7,12 @@ mod manifest_store; mod 
path_trie; mod server_tree; -use std::{ - borrow::Borrow, - collections::{BTreeMap, hash_map::Entry}, - ops::ControlFlow, - path::Path, - sync::Arc, -}; +use std::{borrow::Borrow, collections::hash_map::Entry, ops::ControlFlow, path::Path, sync::Arc}; use collections::HashMap; -use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Subscription}; +use gpui::{App, AppContext as _, Context, Entity, Subscription}; use language::{ManifestDelegate, ManifestName, ManifestQuery}; -pub use manifest_store::ManifestProviders; +pub use manifest_store::ManifestProvidersStore; use path_trie::{LabelPresence, RootPathTrie, TriePath}; use settings::{SettingsStore, WorktreeId}; use worktree::{Event as WorktreeEvent, Snapshot, Worktree}; @@ -28,9 +22,7 @@ use crate::{ worktree_store::{WorktreeStore, WorktreeStoreEvent}, }; -pub(crate) use server_tree::{ - AdapterQuery, LanguageServerTree, LanguageServerTreeNode, LaunchDisposition, -}; +pub(crate) use server_tree::{LanguageServerTree, LanguageServerTreeNode, LaunchDisposition}; struct WorktreeRoots { roots: RootPathTrie, @@ -51,12 +43,9 @@ impl WorktreeRoots { match event { WorktreeEvent::UpdatedEntries(changes) => { for (path, _, kind) in changes.iter() { - match kind { - worktree::PathChange::Removed => { - let path = TriePath::from(path.as_ref()); - this.roots.remove(&path); - } - _ => {} + if kind == &worktree::PathChange::Removed { + let path = TriePath::from(path.as_ref()); + this.roots.remove(&path); } } } @@ -81,14 +70,6 @@ pub struct ManifestTree { _subscriptions: [Subscription; 2], } -#[derive(PartialEq)] -pub(crate) enum ManifestTreeEvent { - WorktreeRemoved(WorktreeId), - Cleared, -} - -impl EventEmitter for ManifestTree {} - impl ManifestTree { pub fn new(worktree_store: Entity, cx: &mut App) -> Entity { cx.new(|cx| Self { @@ -96,35 +77,33 @@ impl ManifestTree { _subscriptions: [ cx.subscribe(&worktree_store, Self::on_worktree_store_event), cx.observe_global::(|this, cx| { - for (_, roots) in &mut this.root_points { + for roots in this.root_points.values_mut() { roots.update(cx, |worktree_roots, _| { worktree_roots.roots = RootPathTrie::new(); }) } - cx.emit(ManifestTreeEvent::Cleared); }), ], worktree_store, }) } + pub(crate) fn root_for_path( &mut self, - ProjectPath { worktree_id, path }: ProjectPath, - manifests: &mut dyn Iterator, - delegate: Arc, + ProjectPath { worktree_id, path }: &ProjectPath, + manifest_name: &ManifestName, + delegate: &Arc, cx: &mut App, - ) -> BTreeMap { - debug_assert_eq!(delegate.worktree_id(), worktree_id); - let mut roots = BTreeMap::from_iter( - manifests.map(|manifest| (manifest, (None, LabelPresence::KnownAbsent))), - ); - let worktree_roots = match self.root_points.entry(worktree_id) { + ) -> Option { + debug_assert_eq!(delegate.worktree_id(), *worktree_id); + let (mut marked_path, mut current_presence) = (None, LabelPresence::KnownAbsent); + let worktree_roots = match self.root_points.entry(*worktree_id) { Entry::Occupied(occupied_entry) => occupied_entry.get().clone(), Entry::Vacant(vacant_entry) => { let Some(worktree) = self .worktree_store .read(cx) - .worktree_for_id(worktree_id, cx) + .worktree_for_id(*worktree_id, cx) else { return Default::default(); }; @@ -133,16 +112,16 @@ impl ManifestTree { } }; - let key = TriePath::from(&*path); + let key = TriePath::from(&**path); worktree_roots.read_with(cx, |this, _| { this.roots.walk(&key, &mut |path, labels| { for (label, presence) in labels { - if let Some((marked_path, current_presence)) = roots.get_mut(label) { - if *current_presence > 
*presence { + if label == manifest_name { + if current_presence > *presence { debug_assert!(false, "RootPathTrie precondition violation; while walking the tree label presence is only allowed to increase"); } - *marked_path = Some(ProjectPath {worktree_id, path: path.clone()}); - *current_presence = *presence; + marked_path = Some(ProjectPath {worktree_id: *worktree_id, path: path.clone()}); + current_presence = *presence; } } @@ -150,12 +129,9 @@ impl ManifestTree { }); }); - for (manifest_name, (root_path, presence)) in &mut roots { - if *presence == LabelPresence::Present { - continue; - } - - let depth = root_path + if current_presence == LabelPresence::KnownAbsent { + // Some part of the path is unexplored. + let depth = marked_path .as_ref() .map(|root_path| { path.strip_prefix(&root_path.path) @@ -165,13 +141,10 @@ impl ManifestTree { }) .unwrap_or_else(|| path.components().count() + 1); - if depth > 0 { - let Some(provider) = ManifestProviders::global(cx).get(manifest_name.borrow()) - else { - log::warn!("Manifest provider `{}` not found", manifest_name.as_ref()); - continue; - }; - + if depth > 0 + && let Some(provider) = + ManifestProvidersStore::global(cx).get(manifest_name.borrow()) + { let root = provider.search(ManifestQuery { path: path.clone(), depth, @@ -182,9 +155,9 @@ impl ManifestTree { let root = TriePath::from(&*known_root); this.roots .insert(&root, manifest_name.clone(), LabelPresence::Present); - *presence = LabelPresence::Present; - *root_path = Some(ProjectPath { - worktree_id, + current_presence = LabelPresence::Present; + marked_path = Some(ProjectPath { + worktree_id: *worktree_id, path: known_root, }); }), @@ -195,27 +168,34 @@ impl ManifestTree { } } } + marked_path.filter(|_| current_presence.eq(&LabelPresence::Present)) + } - roots - .into_iter() - .filter_map(|(k, (path, presence))| { - let path = path?; - presence.eq(&LabelPresence::Present).then(|| (k, path)) + pub(crate) fn root_for_path_or_worktree_root( + &mut self, + project_path: &ProjectPath, + manifest_name: Option<&ManifestName>, + delegate: &Arc, + cx: &mut App, + ) -> ProjectPath { + let worktree_id = project_path.worktree_id; + // Backwards-compat: Fill in any adapters for which we did not detect the root as having the project root at the root of a worktree. 
+ manifest_name + .and_then(|manifest_name| self.root_for_path(project_path, manifest_name, delegate, cx)) + .unwrap_or_else(|| ProjectPath { + worktree_id, + path: Arc::from(Path::new("")), }) - .collect() } + fn on_worktree_store_event( &mut self, _: Entity, evt: &WorktreeStoreEvent, - cx: &mut Context, + _: &mut Context, ) { - match evt { - WorktreeStoreEvent::WorktreeRemoved(_, worktree_id) => { - self.root_points.remove(&worktree_id); - cx.emit(ManifestTreeEvent::WorktreeRemoved(*worktree_id)); - } - _ => {} + if let WorktreeStoreEvent::WorktreeRemoved(_, worktree_id) = evt { + self.root_points.remove(worktree_id); } } } @@ -223,6 +203,7 @@ impl ManifestTree { pub(crate) struct ManifestQueryDelegate { worktree: Snapshot, } + impl ManifestQueryDelegate { pub fn new(worktree: Snapshot) -> Self { Self { worktree } @@ -231,10 +212,8 @@ impl ManifestQueryDelegate { impl ManifestDelegate for ManifestQueryDelegate { fn exists(&self, path: &Path, is_dir: Option) -> bool { - self.worktree.entry_for_path(path).map_or(false, |entry| { - is_dir.map_or(true, |is_required_to_be_dir| { - is_required_to_be_dir == entry.is_dir() - }) + self.worktree.entry_for_path(path).is_some_and(|entry| { + is_dir.is_none_or(|is_required_to_be_dir| is_required_to_be_dir == entry.is_dir()) }) } diff --git a/crates/project/src/manifest_tree/manifest_store.rs b/crates/project/src/manifest_tree/manifest_store.rs index 0462b257985c6ec554519c565f1e935853654e59..cf9f81aee470646d5800ca4a1a4ed7aff4cbd03d 100644 --- a/crates/project/src/manifest_tree/manifest_store.rs +++ b/crates/project/src/manifest_tree/manifest_store.rs @@ -1,4 +1,4 @@ -use collections::HashMap; +use collections::{HashMap, HashSet}; use gpui::{App, Global, SharedString}; use parking_lot::RwLock; use std::{ops::Deref, sync::Arc}; @@ -11,13 +11,13 @@ struct ManifestProvidersState { } #[derive(Clone, Default)] -pub struct ManifestProviders(Arc>); +pub struct ManifestProvidersStore(Arc>); #[derive(Default)] -struct GlobalManifestProvider(ManifestProviders); +struct GlobalManifestProvider(ManifestProvidersStore); impl Deref for GlobalManifestProvider { - type Target = ManifestProviders; + type Target = ManifestProvidersStore; fn deref(&self) -> &Self::Target { &self.0 @@ -26,7 +26,7 @@ impl Deref for GlobalManifestProvider { impl Global for GlobalManifestProvider {} -impl ManifestProviders { +impl ManifestProvidersStore { /// Returns the global [`ManifestStore`]. /// /// Inserts a default [`ManifestStore`] if one does not yet exist. @@ -45,4 +45,7 @@ impl ManifestProviders { pub(super) fn get(&self, name: &SharedString) -> Option> { self.0.read().providers.get(name).cloned() } + pub(crate) fn manifest_file_names(&self) -> HashSet { + self.0.read().providers.keys().cloned().collect() + } } diff --git a/crates/project/src/manifest_tree/path_trie.rs b/crates/project/src/manifest_tree/path_trie.rs index 1a0736765a43b9e1365334de95eacbe9dbf64382..9cebfda25c69fa35b06cefe9ec744b5e6152a820 100644 --- a/crates/project/src/manifest_tree/path_trie.rs +++ b/crates/project/src/manifest_tree/path_trie.rs @@ -22,9 +22,9 @@ pub(super) struct RootPathTrie
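
root_for_path now resolves a single manifest name per query: it walks the worktree's RootPathTrie from the buffer's path toward the root, reuses any presence information recorded earlier, and only asks the manifest provider to search the still-unexplored suffix, caching whatever it finds. The sketch below captures that shape with a plain HashMap in place of the trie and a closure in place of the filesystem check; the real code additionally invalidates cached entries when worktree entries change.

use std::collections::{HashMap, HashSet};
use std::path::{Path, PathBuf};

struct ManifestRoots {
    manifest_name: &'static str,
    /// For each explored directory, the nearest manifest root at or above it
    /// (None if the whole branch is known to have no root).
    cache: HashMap<PathBuf, Option<PathBuf>>,
}

impl ManifestRoots {
    fn root_for_path(
        &mut self,
        path: &Path,
        exists: &impl Fn(&Path) -> bool,
    ) -> Option<PathBuf> {
        let mut unexplored = Vec::new();
        let mut result = None;
        // Walk from the file's directory toward the worktree root.
        for dir in path.ancestors().skip(1) {
            if let Some(cached) = self.cache.get(dir) {
                // Everything from here upward has been resolved before.
                result = cached.clone();
                break;
            }
            unexplored.push(dir.to_path_buf());
            if exists(&dir.join(self.manifest_name)) {
                result = Some(dir.to_path_buf());
                break;
            }
        }
        // Backfill the cache so the directories just walked are not re-checked.
        for dir in unexplored {
            self.cache.insert(dir, result.clone());
        }
        result
    }
}

fn main() {
    let files: HashSet<PathBuf> = ["crates/app/Cargo.toml", "Cargo.toml"]
        .into_iter()
        .map(PathBuf::from)
        .collect();
    let exists = |p: &Path| files.contains(p);

    let mut roots = ManifestRoots {
        manifest_name: "Cargo.toml",
        cache: HashMap::new(),
    };
    let root = roots.root_for_path(Path::new("crates/app/src/main.rs"), &exists);
    assert_eq!(root.as_deref(), Some(Path::new("crates/app")));

    // A second query under the same crate is answered from the cache.
    let again = roots.root_for_path(Path::new("crates/app/src/lib.rs"), &exists);
    assert_eq!(again.as_deref(), Some(Path::new("crates/app")));
}
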